Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- evalkit_cambrian/lib/python3.10/site-packages/starlette/authentication.py +147 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/background.py +41 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/config.py +138 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/convertors.py +89 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/datastructures.py +679 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/formparsers.py +271 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/middleware/__init__.py +41 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/middleware/cors.py +172 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/middleware/exceptions.py +72 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/middleware/httpsredirect.py +19 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/middleware/trustedhost.py +60 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/middleware/wsgi.py +152 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/py.typed +0 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/responses.py +531 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/schemas.py +147 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/staticfiles.py +220 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/templating.py +216 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/testclient.py +791 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/types.py +24 -0
- evalkit_cambrian/lib/python3.10/site-packages/starlette/websockets.py +195 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_cdist_forward_ops.h +39 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_cholesky_solve_helper.h +39 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_coalesce_native.h +23 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_cudnn_ctc_loss_cuda_dispatch.h +24 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_cudnn_init_dropout_state_native.h +22 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_abs.h +44 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_log1p.h +44 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_indices_native.h +21 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_log_softmax_backward_data_meta.h +27 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_log_softmax_meta_dispatch.h +25 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_make_dual_copy.h +39 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_make_per_tensor_quantized_tensor_ops.h +39 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_nested_tensor_softmax_with_shape.h +30 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_reshape_from_tensor_native.h +21 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_scaled_dot_product_efficient_attention_ops.h +28 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_to_cpu.h +30 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_version_ops.h +28 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/adaptive_avg_pool3d_backward_native.h +22 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/adaptive_max_pool3d_ops.h +39 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/affine_grid_generator_native.h +22 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/align_tensors_ops.h +28 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/avg_pool3d_native.h +29 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/bucketize_native.h +27 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/cudnn_grid_sampler.h +39 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/div_compositeexplicitautogradnonfunctional_dispatch.h +26 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/eq.h +53 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/equal_native.h +23 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/eye_cuda_dispatch.h +30 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/fake_quantize_per_tensor_affine_cachemask_cuda_dispatch.h +23 -0
- infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/fft_fftfreq_compositeexplicitautograd_dispatch.h +26 -0
evalkit_cambrian/lib/python3.10/site-packages/starlette/authentication.py
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import functools
|
| 4 |
+
import inspect
|
| 5 |
+
import sys
|
| 6 |
+
import typing
|
| 7 |
+
from urllib.parse import urlencode
|
| 8 |
+
|
| 9 |
+
if sys.version_info >= (3, 10): # pragma: no cover
|
| 10 |
+
from typing import ParamSpec
|
| 11 |
+
else: # pragma: no cover
|
| 12 |
+
from typing_extensions import ParamSpec
|
| 13 |
+
|
| 14 |
+
from starlette._utils import is_async_callable
|
| 15 |
+
from starlette.exceptions import HTTPException
|
| 16 |
+
from starlette.requests import HTTPConnection, Request
|
| 17 |
+
from starlette.responses import RedirectResponse
|
| 18 |
+
from starlette.websockets import WebSocket
|
| 19 |
+
|
| 20 |
+
_P = ParamSpec("_P")
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def has_required_scope(conn: HTTPConnection, scopes: typing.Sequence[str]) -> bool:
|
| 24 |
+
for scope in scopes:
|
| 25 |
+
if scope not in conn.auth.scopes:
|
| 26 |
+
return False
|
| 27 |
+
return True
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def requires(
|
| 31 |
+
scopes: str | typing.Sequence[str],
|
| 32 |
+
status_code: int = 403,
|
| 33 |
+
redirect: str | None = None,
|
| 34 |
+
) -> typing.Callable[[typing.Callable[_P, typing.Any]], typing.Callable[_P, typing.Any]]:
|
| 35 |
+
scopes_list = [scopes] if isinstance(scopes, str) else list(scopes)
|
| 36 |
+
|
| 37 |
+
def decorator(
|
| 38 |
+
func: typing.Callable[_P, typing.Any],
|
| 39 |
+
) -> typing.Callable[_P, typing.Any]:
|
| 40 |
+
sig = inspect.signature(func)
|
| 41 |
+
for idx, parameter in enumerate(sig.parameters.values()):
|
| 42 |
+
if parameter.name == "request" or parameter.name == "websocket":
|
| 43 |
+
type_ = parameter.name
|
| 44 |
+
break
|
| 45 |
+
else:
|
| 46 |
+
raise Exception(f'No "request" or "websocket" argument on function "{func}"')
|
| 47 |
+
|
| 48 |
+
if type_ == "websocket":
|
| 49 |
+
# Handle websocket functions. (Always async)
|
| 50 |
+
@functools.wraps(func)
|
| 51 |
+
async def websocket_wrapper(*args: _P.args, **kwargs: _P.kwargs) -> None:
|
| 52 |
+
websocket = kwargs.get("websocket", args[idx] if idx < len(args) else None)
|
| 53 |
+
assert isinstance(websocket, WebSocket)
|
| 54 |
+
|
| 55 |
+
if not has_required_scope(websocket, scopes_list):
|
| 56 |
+
await websocket.close()
|
| 57 |
+
else:
|
| 58 |
+
await func(*args, **kwargs)
|
| 59 |
+
|
| 60 |
+
return websocket_wrapper
|
| 61 |
+
|
| 62 |
+
elif is_async_callable(func):
|
| 63 |
+
# Handle async request/response functions.
|
| 64 |
+
@functools.wraps(func)
|
| 65 |
+
async def async_wrapper(*args: _P.args, **kwargs: _P.kwargs) -> typing.Any:
|
| 66 |
+
request = kwargs.get("request", args[idx] if idx < len(args) else None)
|
| 67 |
+
assert isinstance(request, Request)
|
| 68 |
+
|
| 69 |
+
if not has_required_scope(request, scopes_list):
|
| 70 |
+
if redirect is not None:
|
| 71 |
+
orig_request_qparam = urlencode({"next": str(request.url)})
|
| 72 |
+
next_url = f"{request.url_for(redirect)}?{orig_request_qparam}"
|
| 73 |
+
return RedirectResponse(url=next_url, status_code=303)
|
| 74 |
+
raise HTTPException(status_code=status_code)
|
| 75 |
+
return await func(*args, **kwargs)
|
| 76 |
+
|
| 77 |
+
return async_wrapper
|
| 78 |
+
|
| 79 |
+
else:
|
| 80 |
+
# Handle sync request/response functions.
|
| 81 |
+
@functools.wraps(func)
|
| 82 |
+
def sync_wrapper(*args: _P.args, **kwargs: _P.kwargs) -> typing.Any:
|
| 83 |
+
request = kwargs.get("request", args[idx] if idx < len(args) else None)
|
| 84 |
+
assert isinstance(request, Request)
|
| 85 |
+
|
| 86 |
+
if not has_required_scope(request, scopes_list):
|
| 87 |
+
if redirect is not None:
|
| 88 |
+
orig_request_qparam = urlencode({"next": str(request.url)})
|
| 89 |
+
next_url = f"{request.url_for(redirect)}?{orig_request_qparam}"
|
| 90 |
+
return RedirectResponse(url=next_url, status_code=303)
|
| 91 |
+
raise HTTPException(status_code=status_code)
|
| 92 |
+
return func(*args, **kwargs)
|
| 93 |
+
|
| 94 |
+
return sync_wrapper
|
| 95 |
+
|
| 96 |
+
return decorator
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
class AuthenticationError(Exception):
|
| 100 |
+
pass
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
class AuthenticationBackend:
|
| 104 |
+
async def authenticate(self, conn: HTTPConnection) -> tuple[AuthCredentials, BaseUser] | None:
|
| 105 |
+
raise NotImplementedError() # pragma: no cover
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
class AuthCredentials:
|
| 109 |
+
def __init__(self, scopes: typing.Sequence[str] | None = None):
|
| 110 |
+
self.scopes = [] if scopes is None else list(scopes)
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
class BaseUser:
|
| 114 |
+
@property
|
| 115 |
+
def is_authenticated(self) -> bool:
|
| 116 |
+
raise NotImplementedError() # pragma: no cover
|
| 117 |
+
|
| 118 |
+
@property
|
| 119 |
+
def display_name(self) -> str:
|
| 120 |
+
raise NotImplementedError() # pragma: no cover
|
| 121 |
+
|
| 122 |
+
@property
|
| 123 |
+
def identity(self) -> str:
|
| 124 |
+
raise NotImplementedError() # pragma: no cover
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
class SimpleUser(BaseUser):
|
| 128 |
+
def __init__(self, username: str) -> None:
|
| 129 |
+
self.username = username
|
| 130 |
+
|
| 131 |
+
@property
|
| 132 |
+
def is_authenticated(self) -> bool:
|
| 133 |
+
return True
|
| 134 |
+
|
| 135 |
+
@property
|
| 136 |
+
def display_name(self) -> str:
|
| 137 |
+
return self.username
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
class UnauthenticatedUser(BaseUser):
|
| 141 |
+
@property
|
| 142 |
+
def is_authenticated(self) -> bool:
|
| 143 |
+
return False
|
| 144 |
+
|
| 145 |
+
@property
|
| 146 |
+
def display_name(self) -> str:
|
| 147 |
+
return ""
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/background.py
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
import typing
|
| 5 |
+
|
| 6 |
+
if sys.version_info >= (3, 10): # pragma: no cover
|
| 7 |
+
from typing import ParamSpec
|
| 8 |
+
else: # pragma: no cover
|
| 9 |
+
from typing_extensions import ParamSpec
|
| 10 |
+
|
| 11 |
+
from starlette._utils import is_async_callable
|
| 12 |
+
from starlette.concurrency import run_in_threadpool
|
| 13 |
+
|
| 14 |
+
P = ParamSpec("P")
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class BackgroundTask:
|
| 18 |
+
def __init__(self, func: typing.Callable[P, typing.Any], *args: P.args, **kwargs: P.kwargs) -> None:
|
| 19 |
+
self.func = func
|
| 20 |
+
self.args = args
|
| 21 |
+
self.kwargs = kwargs
|
| 22 |
+
self.is_async = is_async_callable(func)
|
| 23 |
+
|
| 24 |
+
async def __call__(self) -> None:
|
| 25 |
+
if self.is_async:
|
| 26 |
+
await self.func(*self.args, **self.kwargs)
|
| 27 |
+
else:
|
| 28 |
+
await run_in_threadpool(self.func, *self.args, **self.kwargs)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class BackgroundTasks(BackgroundTask):
|
| 32 |
+
def __init__(self, tasks: typing.Sequence[BackgroundTask] | None = None):
|
| 33 |
+
self.tasks = list(tasks) if tasks else []
|
| 34 |
+
|
| 35 |
+
def add_task(self, func: typing.Callable[P, typing.Any], *args: P.args, **kwargs: P.kwargs) -> None:
|
| 36 |
+
task = BackgroundTask(func, *args, **kwargs)
|
| 37 |
+
self.tasks.append(task)
|
| 38 |
+
|
| 39 |
+
async def __call__(self) -> None:
|
| 40 |
+
for task in self.tasks:
|
| 41 |
+
await task()
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/config.py
ADDED
|
@@ -0,0 +1,138 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import typing
|
| 5 |
+
import warnings
|
| 6 |
+
from pathlib import Path
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class undefined:
|
| 10 |
+
pass
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class EnvironError(Exception):
|
| 14 |
+
pass
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class Environ(typing.MutableMapping[str, str]):
|
| 18 |
+
def __init__(self, environ: typing.MutableMapping[str, str] = os.environ):
|
| 19 |
+
self._environ = environ
|
| 20 |
+
self._has_been_read: set[str] = set()
|
| 21 |
+
|
| 22 |
+
def __getitem__(self, key: str) -> str:
|
| 23 |
+
self._has_been_read.add(key)
|
| 24 |
+
return self._environ.__getitem__(key)
|
| 25 |
+
|
| 26 |
+
def __setitem__(self, key: str, value: str) -> None:
|
| 27 |
+
if key in self._has_been_read:
|
| 28 |
+
raise EnvironError(f"Attempting to set environ['{key}'], but the value has already been read.")
|
| 29 |
+
self._environ.__setitem__(key, value)
|
| 30 |
+
|
| 31 |
+
def __delitem__(self, key: str) -> None:
|
| 32 |
+
if key in self._has_been_read:
|
| 33 |
+
raise EnvironError(f"Attempting to delete environ['{key}'], but the value has already been read.")
|
| 34 |
+
self._environ.__delitem__(key)
|
| 35 |
+
|
| 36 |
+
def __iter__(self) -> typing.Iterator[str]:
|
| 37 |
+
return iter(self._environ)
|
| 38 |
+
|
| 39 |
+
def __len__(self) -> int:
|
| 40 |
+
return len(self._environ)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
environ = Environ()
|
| 44 |
+
|
| 45 |
+
T = typing.TypeVar("T")
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class Config:
|
| 49 |
+
def __init__(
|
| 50 |
+
self,
|
| 51 |
+
env_file: str | Path | None = None,
|
| 52 |
+
environ: typing.Mapping[str, str] = environ,
|
| 53 |
+
env_prefix: str = "",
|
| 54 |
+
) -> None:
|
| 55 |
+
self.environ = environ
|
| 56 |
+
self.env_prefix = env_prefix
|
| 57 |
+
self.file_values: dict[str, str] = {}
|
| 58 |
+
if env_file is not None:
|
| 59 |
+
if not os.path.isfile(env_file):
|
| 60 |
+
warnings.warn(f"Config file '{env_file}' not found.")
|
| 61 |
+
else:
|
| 62 |
+
self.file_values = self._read_file(env_file)
|
| 63 |
+
|
| 64 |
+
@typing.overload
|
| 65 |
+
def __call__(self, key: str, *, default: None) -> str | None: ...
|
| 66 |
+
|
| 67 |
+
@typing.overload
|
| 68 |
+
def __call__(self, key: str, cast: type[T], default: T = ...) -> T: ...
|
| 69 |
+
|
| 70 |
+
@typing.overload
|
| 71 |
+
def __call__(self, key: str, cast: type[str] = ..., default: str = ...) -> str: ...
|
| 72 |
+
|
| 73 |
+
@typing.overload
|
| 74 |
+
def __call__(
|
| 75 |
+
self,
|
| 76 |
+
key: str,
|
| 77 |
+
cast: typing.Callable[[typing.Any], T] = ...,
|
| 78 |
+
default: typing.Any = ...,
|
| 79 |
+
) -> T: ...
|
| 80 |
+
|
| 81 |
+
@typing.overload
|
| 82 |
+
def __call__(self, key: str, cast: type[str] = ..., default: T = ...) -> T | str: ...
|
| 83 |
+
|
| 84 |
+
def __call__(
|
| 85 |
+
self,
|
| 86 |
+
key: str,
|
| 87 |
+
cast: typing.Callable[[typing.Any], typing.Any] | None = None,
|
| 88 |
+
default: typing.Any = undefined,
|
| 89 |
+
) -> typing.Any:
|
| 90 |
+
return self.get(key, cast, default)
|
| 91 |
+
|
| 92 |
+
def get(
|
| 93 |
+
self,
|
| 94 |
+
key: str,
|
| 95 |
+
cast: typing.Callable[[typing.Any], typing.Any] | None = None,
|
| 96 |
+
default: typing.Any = undefined,
|
| 97 |
+
) -> typing.Any:
|
| 98 |
+
key = self.env_prefix + key
|
| 99 |
+
if key in self.environ:
|
| 100 |
+
value = self.environ[key]
|
| 101 |
+
return self._perform_cast(key, value, cast)
|
| 102 |
+
if key in self.file_values:
|
| 103 |
+
value = self.file_values[key]
|
| 104 |
+
return self._perform_cast(key, value, cast)
|
| 105 |
+
if default is not undefined:
|
| 106 |
+
return self._perform_cast(key, default, cast)
|
| 107 |
+
raise KeyError(f"Config '{key}' is missing, and has no default.")
|
| 108 |
+
|
| 109 |
+
def _read_file(self, file_name: str | Path) -> dict[str, str]:
|
| 110 |
+
file_values: dict[str, str] = {}
|
| 111 |
+
with open(file_name) as input_file:
|
| 112 |
+
for line in input_file.readlines():
|
| 113 |
+
line = line.strip()
|
| 114 |
+
if "=" in line and not line.startswith("#"):
|
| 115 |
+
key, value = line.split("=", 1)
|
| 116 |
+
key = key.strip()
|
| 117 |
+
value = value.strip().strip("\"'")
|
| 118 |
+
file_values[key] = value
|
| 119 |
+
return file_values
|
| 120 |
+
|
| 121 |
+
def _perform_cast(
|
| 122 |
+
self,
|
| 123 |
+
key: str,
|
| 124 |
+
value: typing.Any,
|
| 125 |
+
cast: typing.Callable[[typing.Any], typing.Any] | None = None,
|
| 126 |
+
) -> typing.Any:
|
| 127 |
+
if cast is None or value is None:
|
| 128 |
+
return value
|
| 129 |
+
elif cast is bool and isinstance(value, str):
|
| 130 |
+
mapping = {"true": True, "1": True, "false": False, "0": False}
|
| 131 |
+
value = value.lower()
|
| 132 |
+
if value not in mapping:
|
| 133 |
+
raise ValueError(f"Config '{key}' has value '{value}'. Not a valid bool.")
|
| 134 |
+
return mapping[value]
|
| 135 |
+
try:
|
| 136 |
+
return cast(value)
|
| 137 |
+
except (TypeError, ValueError):
|
| 138 |
+
raise ValueError(f"Config '{key}' has value '{value}'. Not a valid {cast.__name__}.")
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/convertors.py
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import math
|
| 4 |
+
import typing
|
| 5 |
+
import uuid
|
| 6 |
+
|
| 7 |
+
T = typing.TypeVar("T")
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class Convertor(typing.Generic[T]):
|
| 11 |
+
regex: typing.ClassVar[str] = ""
|
| 12 |
+
|
| 13 |
+
def convert(self, value: str) -> T:
|
| 14 |
+
raise NotImplementedError() # pragma: no cover
|
| 15 |
+
|
| 16 |
+
def to_string(self, value: T) -> str:
|
| 17 |
+
raise NotImplementedError() # pragma: no cover
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class StringConvertor(Convertor[str]):
|
| 21 |
+
regex = "[^/]+"
|
| 22 |
+
|
| 23 |
+
def convert(self, value: str) -> str:
|
| 24 |
+
return value
|
| 25 |
+
|
| 26 |
+
def to_string(self, value: str) -> str:
|
| 27 |
+
value = str(value)
|
| 28 |
+
assert "/" not in value, "May not contain path separators"
|
| 29 |
+
assert value, "Must not be empty"
|
| 30 |
+
return value
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class PathConvertor(Convertor[str]):
|
| 34 |
+
regex = ".*"
|
| 35 |
+
|
| 36 |
+
def convert(self, value: str) -> str:
|
| 37 |
+
return str(value)
|
| 38 |
+
|
| 39 |
+
def to_string(self, value: str) -> str:
|
| 40 |
+
return str(value)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class IntegerConvertor(Convertor[int]):
|
| 44 |
+
regex = "[0-9]+"
|
| 45 |
+
|
| 46 |
+
def convert(self, value: str) -> int:
|
| 47 |
+
return int(value)
|
| 48 |
+
|
| 49 |
+
def to_string(self, value: int) -> str:
|
| 50 |
+
value = int(value)
|
| 51 |
+
assert value >= 0, "Negative integers are not supported"
|
| 52 |
+
return str(value)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class FloatConvertor(Convertor[float]):
|
| 56 |
+
regex = r"[0-9]+(\.[0-9]+)?"
|
| 57 |
+
|
| 58 |
+
def convert(self, value: str) -> float:
|
| 59 |
+
return float(value)
|
| 60 |
+
|
| 61 |
+
def to_string(self, value: float) -> str:
|
| 62 |
+
value = float(value)
|
| 63 |
+
assert value >= 0.0, "Negative floats are not supported"
|
| 64 |
+
assert not math.isnan(value), "NaN values are not supported"
|
| 65 |
+
assert not math.isinf(value), "Infinite values are not supported"
|
| 66 |
+
return ("%0.20f" % value).rstrip("0").rstrip(".")
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class UUIDConvertor(Convertor[uuid.UUID]):
|
| 70 |
+
regex = "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}"
|
| 71 |
+
|
| 72 |
+
def convert(self, value: str) -> uuid.UUID:
|
| 73 |
+
return uuid.UUID(value)
|
| 74 |
+
|
| 75 |
+
def to_string(self, value: uuid.UUID) -> str:
|
| 76 |
+
return str(value)
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
CONVERTOR_TYPES: dict[str, Convertor[typing.Any]] = {
|
| 80 |
+
"str": StringConvertor(),
|
| 81 |
+
"path": PathConvertor(),
|
| 82 |
+
"int": IntegerConvertor(),
|
| 83 |
+
"float": FloatConvertor(),
|
| 84 |
+
"uuid": UUIDConvertor(),
|
| 85 |
+
}
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
def register_url_convertor(key: str, convertor: Convertor[typing.Any]) -> None:
|
| 89 |
+
CONVERTOR_TYPES[key] = convertor
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/datastructures.py
ADDED
|
@@ -0,0 +1,679 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import typing
|
| 4 |
+
from shlex import shlex
|
| 5 |
+
from urllib.parse import SplitResult, parse_qsl, urlencode, urlsplit
|
| 6 |
+
|
| 7 |
+
from starlette.concurrency import run_in_threadpool
|
| 8 |
+
from starlette.types import Scope
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class Address(typing.NamedTuple):
|
| 12 |
+
host: str
|
| 13 |
+
port: int
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
_KeyType = typing.TypeVar("_KeyType")
|
| 17 |
+
# Mapping keys are invariant but their values are covariant since
|
| 18 |
+
# you can only read them
|
| 19 |
+
# that is, you can't do `Mapping[str, Animal]()["fido"] = Dog()`
|
| 20 |
+
_CovariantValueType = typing.TypeVar("_CovariantValueType", covariant=True)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class URL:
|
| 24 |
+
def __init__(
|
| 25 |
+
self,
|
| 26 |
+
url: str = "",
|
| 27 |
+
scope: Scope | None = None,
|
| 28 |
+
**components: typing.Any,
|
| 29 |
+
) -> None:
|
| 30 |
+
if scope is not None:
|
| 31 |
+
assert not url, 'Cannot set both "url" and "scope".'
|
| 32 |
+
assert not components, 'Cannot set both "scope" and "**components".'
|
| 33 |
+
scheme = scope.get("scheme", "http")
|
| 34 |
+
server = scope.get("server", None)
|
| 35 |
+
path = scope["path"]
|
| 36 |
+
query_string = scope.get("query_string", b"")
|
| 37 |
+
|
| 38 |
+
host_header = None
|
| 39 |
+
for key, value in scope["headers"]:
|
| 40 |
+
if key == b"host":
|
| 41 |
+
host_header = value.decode("latin-1")
|
| 42 |
+
break
|
| 43 |
+
|
| 44 |
+
if host_header is not None:
|
| 45 |
+
url = f"{scheme}://{host_header}{path}"
|
| 46 |
+
elif server is None:
|
| 47 |
+
url = path
|
| 48 |
+
else:
|
| 49 |
+
host, port = server
|
| 50 |
+
default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}[scheme]
|
| 51 |
+
if port == default_port:
|
| 52 |
+
url = f"{scheme}://{host}{path}"
|
| 53 |
+
else:
|
| 54 |
+
url = f"{scheme}://{host}:{port}{path}"
|
| 55 |
+
|
| 56 |
+
if query_string:
|
| 57 |
+
url += "?" + query_string.decode()
|
| 58 |
+
elif components:
|
| 59 |
+
assert not url, 'Cannot set both "url" and "**components".'
|
| 60 |
+
url = URL("").replace(**components).components.geturl()
|
| 61 |
+
|
| 62 |
+
self._url = url
|
| 63 |
+
|
| 64 |
+
@property
|
| 65 |
+
def components(self) -> SplitResult:
|
| 66 |
+
if not hasattr(self, "_components"):
|
| 67 |
+
self._components = urlsplit(self._url)
|
| 68 |
+
return self._components
|
| 69 |
+
|
| 70 |
+
@property
|
| 71 |
+
def scheme(self) -> str:
|
| 72 |
+
return self.components.scheme
|
| 73 |
+
|
| 74 |
+
@property
|
| 75 |
+
def netloc(self) -> str:
|
| 76 |
+
return self.components.netloc
|
| 77 |
+
|
| 78 |
+
@property
|
| 79 |
+
def path(self) -> str:
|
| 80 |
+
return self.components.path
|
| 81 |
+
|
| 82 |
+
@property
|
| 83 |
+
def query(self) -> str:
|
| 84 |
+
return self.components.query
|
| 85 |
+
|
| 86 |
+
@property
|
| 87 |
+
def fragment(self) -> str:
|
| 88 |
+
return self.components.fragment
|
| 89 |
+
|
| 90 |
+
@property
|
| 91 |
+
def username(self) -> None | str:
|
| 92 |
+
return self.components.username
|
| 93 |
+
|
| 94 |
+
@property
|
| 95 |
+
def password(self) -> None | str:
|
| 96 |
+
return self.components.password
|
| 97 |
+
|
| 98 |
+
@property
|
| 99 |
+
def hostname(self) -> None | str:
|
| 100 |
+
return self.components.hostname
|
| 101 |
+
|
| 102 |
+
@property
|
| 103 |
+
def port(self) -> int | None:
|
| 104 |
+
return self.components.port
|
| 105 |
+
|
| 106 |
+
@property
|
| 107 |
+
def is_secure(self) -> bool:
|
| 108 |
+
return self.scheme in ("https", "wss")
|
| 109 |
+
|
| 110 |
+
def replace(self, **kwargs: typing.Any) -> URL:
|
| 111 |
+
if "username" in kwargs or "password" in kwargs or "hostname" in kwargs or "port" in kwargs:
|
| 112 |
+
hostname = kwargs.pop("hostname", None)
|
| 113 |
+
port = kwargs.pop("port", self.port)
|
| 114 |
+
username = kwargs.pop("username", self.username)
|
| 115 |
+
password = kwargs.pop("password", self.password)
|
| 116 |
+
|
| 117 |
+
if hostname is None:
|
| 118 |
+
netloc = self.netloc
|
| 119 |
+
_, _, hostname = netloc.rpartition("@")
|
| 120 |
+
|
| 121 |
+
if hostname[-1] != "]":
|
| 122 |
+
hostname = hostname.rsplit(":", 1)[0]
|
| 123 |
+
|
| 124 |
+
netloc = hostname
|
| 125 |
+
if port is not None:
|
| 126 |
+
netloc += f":{port}"
|
| 127 |
+
if username is not None:
|
| 128 |
+
userpass = username
|
| 129 |
+
if password is not None:
|
| 130 |
+
userpass += f":{password}"
|
| 131 |
+
netloc = f"{userpass}@{netloc}"
|
| 132 |
+
|
| 133 |
+
kwargs["netloc"] = netloc
|
| 134 |
+
|
| 135 |
+
components = self.components._replace(**kwargs)
|
| 136 |
+
return self.__class__(components.geturl())
|
| 137 |
+
|
| 138 |
+
def include_query_params(self, **kwargs: typing.Any) -> URL:
    """Return a new URL with *kwargs* merged into the existing query params."""
    merged = MultiDict(parse_qsl(self.query, keep_blank_values=True))
    merged.update({str(name): str(val) for name, val in kwargs.items()})
    return self.replace(query=urlencode(merged.multi_items()))
|
| 143 |
+
|
| 144 |
+
def replace_query_params(self, **kwargs: typing.Any) -> URL:
    """Return a new URL whose query string is built solely from *kwargs*."""
    pairs = [(str(name), str(val)) for name, val in kwargs.items()]
    return self.replace(query=urlencode(pairs))
|
| 147 |
+
|
| 148 |
+
def remove_query_params(self, keys: str | typing.Sequence[str]) -> URL:
    """Return a new URL with the given query parameter(s) removed."""
    if isinstance(keys, str):
        keys = [keys]
    params = MultiDict(parse_qsl(self.query, keep_blank_values=True))
    for name in keys:
        params.pop(name, None)
    return self.replace(query=urlencode(params.multi_items()))
|
| 156 |
+
|
| 157 |
+
def __eq__(self, other: typing.Any) -> bool:
    """URLs compare equal by their string form (so a URL equals a plain str too)."""
    return str(self) == str(other)
|
| 159 |
+
|
| 160 |
+
def __str__(self) -> str:
    """Return the raw URL string this instance was built from."""
    return self._url
|
| 162 |
+
|
| 163 |
+
def __repr__(self) -> str:
    """Debug representation; masks any password component of the URL."""
    shown = str(self.replace(password="********")) if self.password else str(self)
    return f"{self.__class__.__name__}({shown!r})"
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
class URLPath(str):
    """
    A URL path string that may also hold an associated protocol and/or host.
    Used by the routing to return `url_path_for` matches.
    """

    def __new__(cls, path: str, protocol: str = "", host: str = "") -> URLPath:
        assert protocol in ("http", "websocket", "")
        return str.__new__(cls, path)

    def __init__(self, path: str, protocol: str = "", host: str = "") -> None:
        self.protocol = protocol
        self.host = host

    def make_absolute_url(self, base_url: str | URL) -> URL:
        """Join this path onto *base_url*, honouring protocol and host overrides."""
        if isinstance(base_url, str):
            base_url = URL(base_url)
        if self.protocol:
            # Map the abstract protocol onto a concrete scheme, keeping the
            # secure flavour when the base URL is itself secure.
            secure = base_url.is_secure
            scheme = {
                "http": "https" if secure else "http",
                "websocket": "wss" if secure else "ws",
            }[self.protocol]
        else:
            scheme = base_url.scheme
        authority = self.host or base_url.netloc
        full_path = base_url.path.rstrip("/") + str(self)
        return URL(scheme=scheme, netloc=authority, path=full_path)
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
class Secret:
    """
    Holds a string value that should not be revealed in tracebacks etc.
    You should cast the value to `str` at the point it is required.
    """

    def __init__(self, value: str):
        self._value = value

    def __repr__(self) -> str:
        # Never leak the wrapped value in debug output.
        return f"{self.__class__.__name__}('**********')"

    def __str__(self) -> str:
        return self._value

    def __bool__(self) -> bool:
        return bool(self._value)
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
class CommaSeparatedStrings(typing.Sequence[str]):
    """A sequence of strings parsed from a comma-separated (optionally quoted) string."""

    def __init__(self, value: str | typing.Sequence[str]):
        if isinstance(value, str):
            # shlex honours quoting, so individual values may contain commas.
            lexer = shlex(value, posix=True)
            lexer.whitespace = ","
            lexer.whitespace_split = True
            self._items = [token.strip() for token in lexer]
        else:
            self._items = list(value)

    def __len__(self) -> int:
        return len(self._items)

    def __getitem__(self, index: int | slice) -> typing.Any:
        return self._items[index]

    def __iter__(self) -> typing.Iterator[str]:
        return iter(self._items)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({list(self)!r})"

    def __str__(self) -> str:
        return ", ".join(repr(item) for item in self)
|
| 246 |
+
|
| 247 |
+
|
| 248 |
+
class ImmutableMultiDict(typing.Mapping[_KeyType, _CovariantValueType]):
    """An immutable mapping that may hold repeated keys.

    Internally it keeps both the full ordered item list (`multi_items`) and a
    plain dict view in which the last value for a key wins.
    """

    _dict: dict[_KeyType, _CovariantValueType]

    def __init__(
        self,
        *args: ImmutableMultiDict[_KeyType, _CovariantValueType]
        | typing.Mapping[_KeyType, _CovariantValueType]
        | typing.Iterable[tuple[_KeyType, _CovariantValueType]],
        **kwargs: typing.Any,
    ) -> None:
        assert len(args) < 2, "Too many arguments."

        value: typing.Any = args[0] if args else []
        if kwargs:
            # Fold keyword arguments in as additional items after the
            # positional ones.
            value = ImmutableMultiDict(value).multi_items() + ImmutableMultiDict(kwargs).multi_items()

        if not value:
            pairs: list[tuple[typing.Any, typing.Any]] = []
        elif hasattr(value, "multi_items"):
            value = typing.cast(ImmutableMultiDict[_KeyType, _CovariantValueType], value)
            pairs = list(value.multi_items())
        elif hasattr(value, "items"):
            value = typing.cast(typing.Mapping[_KeyType, _CovariantValueType], value)
            pairs = list(value.items())
        else:
            value = typing.cast("list[tuple[typing.Any, typing.Any]]", value)
            pairs = list(value)

        self._dict = dict(pairs)
        self._list = pairs

    def getlist(self, key: typing.Any) -> list[_CovariantValueType]:
        """Return every value stored for *key*, in insertion order."""
        return [val for name, val in self._list if name == key]

    def keys(self) -> typing.KeysView[_KeyType]:
        return self._dict.keys()

    def values(self) -> typing.ValuesView[_CovariantValueType]:
        return self._dict.values()

    def items(self) -> typing.ItemsView[_KeyType, _CovariantValueType]:
        return self._dict.items()

    def multi_items(self) -> list[tuple[_KeyType, _CovariantValueType]]:
        """Return a copy of the full item list, duplicates included."""
        return list(self._list)

    def __getitem__(self, key: _KeyType) -> _CovariantValueType:
        return self._dict[key]

    def __contains__(self, key: typing.Any) -> bool:
        return key in self._dict

    def __iter__(self) -> typing.Iterator[_KeyType]:
        return iter(self.keys())

    def __len__(self) -> int:
        return len(self._dict)

    def __eq__(self, other: typing.Any) -> bool:
        # Order-insensitive comparison over the full item list.
        return isinstance(other, self.__class__) and sorted(self._list) == sorted(other._list)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.multi_items()!r})"
|
| 315 |
+
|
| 316 |
+
|
| 317 |
+
class MultiDict(ImmutableMultiDict[typing.Any, typing.Any]):
    """Mutable companion to `ImmutableMultiDict`: same dual dict/list storage,
    plus in-place mutation operations."""

    def __setitem__(self, key: typing.Any, value: typing.Any) -> None:
        self.setlist(key, [value])

    def __delitem__(self, key: typing.Any) -> None:
        self._list = [pair for pair in self._list if pair[0] != key]
        del self._dict[key]

    def pop(self, key: typing.Any, default: typing.Any = None) -> typing.Any:
        """Remove every entry for *key*; return the last value or *default*."""
        self._list = [pair for pair in self._list if pair[0] != key]
        return self._dict.pop(key, default)

    def popitem(self) -> tuple[typing.Any, typing.Any]:
        key, value = self._dict.popitem()
        self._list = [pair for pair in self._list if pair[0] != key]
        return key, value

    def poplist(self, key: typing.Any) -> list[typing.Any]:
        """Remove and return every value stored for *key*."""
        values = [val for name, val in self._list if name == key]
        self.pop(key)
        return values

    def clear(self) -> None:
        self._dict.clear()
        self._list.clear()

    def setdefault(self, key: typing.Any, default: typing.Any = None) -> typing.Any:
        if key not in self:
            self._dict[key] = default
            self._list.append((key, default))
        return self[key]

    def setlist(self, key: typing.Any, values: list[typing.Any]) -> None:
        """Replace all entries for *key* with *values* (empty list removes the key)."""
        if not values:
            self.pop(key, None)
        else:
            kept = [pair for pair in self._list if pair[0] != key]
            self._list = kept + [(key, value) for value in values]
            self._dict[key] = values[-1]

    def append(self, key: typing.Any, value: typing.Any) -> None:
        """Add an additional entry for *key*, preserving existing ones."""
        self._list.append((key, value))
        self._dict[key] = value

    def update(
        self,
        *args: MultiDict | typing.Mapping[typing.Any, typing.Any] | list[tuple[typing.Any, typing.Any]],
        **kwargs: typing.Any,
    ) -> None:
        incoming = MultiDict(*args, **kwargs)
        kept = [pair for pair in self._list if pair[0] not in incoming.keys()]
        self._list = kept + incoming.multi_items()
        self._dict.update(incoming)
|
| 371 |
+
|
| 372 |
+
|
| 373 |
+
class QueryParams(ImmutableMultiDict[str, str]):
    """
    An immutable multidict.
    """

    def __init__(
        self,
        *args: ImmutableMultiDict[typing.Any, typing.Any]
        | typing.Mapping[typing.Any, typing.Any]
        | list[tuple[typing.Any, typing.Any]]
        | str
        | bytes,
        **kwargs: typing.Any,
    ) -> None:
        assert len(args) < 2, "Too many arguments."

        value = args[0] if args else []

        if isinstance(value, str):
            super().__init__(parse_qsl(value, keep_blank_values=True), **kwargs)
        elif isinstance(value, bytes):
            # Raw query strings arrive latin-1 encoded in the ASGI scope.
            super().__init__(parse_qsl(value.decode("latin-1"), keep_blank_values=True), **kwargs)
        else:
            super().__init__(*args, **kwargs)  # type: ignore[arg-type]
        # Normalise every key and value to `str`.
        self._list = [(str(name), str(val)) for name, val in self._list]
        self._dict = {str(name): str(val) for name, val in self._dict.items()}

    def __str__(self) -> str:
        return urlencode(self._list)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({str(self)!r})"
|
| 407 |
+
|
| 408 |
+
|
| 409 |
+
class UploadFile:
    """
    An uploaded file included as part of the request data.
    """

    def __init__(
        self,
        file: typing.BinaryIO,
        *,
        size: int | None = None,
        filename: str | None = None,
        headers: Headers | None = None,
    ) -> None:
        self.filename = filename
        self.file = file
        self.size = size
        self.headers = headers or Headers()

    @property
    def content_type(self) -> str | None:
        return self.headers.get("content-type", None)

    @property
    def _in_memory(self) -> bool:
        # check for SpooledTemporaryFile._rolled
        return not getattr(self.file, "_rolled", True)

    async def write(self, data: bytes) -> None:
        """Append *data* to the file, updating ``size`` when it is tracked."""
        if self.size is not None:
            self.size += len(data)

        if not self._in_memory:
            # File has rolled to disk: delegate blocking I/O to a thread.
            await run_in_threadpool(self.file.write, data)
        else:
            self.file.write(data)

    async def read(self, size: int = -1) -> bytes:
        if not self._in_memory:
            return await run_in_threadpool(self.file.read, size)
        return self.file.read(size)

    async def seek(self, offset: int) -> None:
        if not self._in_memory:
            await run_in_threadpool(self.file.seek, offset)
        else:
            self.file.seek(offset)

    async def close(self) -> None:
        if not self._in_memory:
            await run_in_threadpool(self.file.close)
        else:
            self.file.close()

    def __repr__(self) -> str:
        return (
            f"{self.__class__.__name__}("
            f"filename={self.filename!r}, "
            f"size={self.size!r}, "
            f"headers={self.headers!r})"
        )
|
| 470 |
+
|
| 471 |
+
|
| 472 |
+
class FormData(ImmutableMultiDict[str, typing.Union[UploadFile, str]]):
    """
    An immutable multidict, containing both file uploads and text input.
    """

    def __init__(
        self,
        *args: FormData | typing.Mapping[str, str | UploadFile] | list[tuple[str, str | UploadFile]],
        **kwargs: str | UploadFile,
    ) -> None:
        super().__init__(*args, **kwargs)

    async def close(self) -> None:
        """Close every uploaded file contained in the form."""
        for _, entry in self.multi_items():
            if isinstance(entry, UploadFile):
                await entry.close()
|
| 488 |
+
|
| 489 |
+
|
| 490 |
+
class Headers(typing.Mapping[str, str]):
    """
    An immutable, case-insensitive multidict.

    Stored internally as a list of lower-cased latin-1 encoded (name, value)
    byte pairs, matching the ASGI wire representation.
    """

    def __init__(
        self,
        headers: typing.Mapping[str, str] | None = None,
        raw: list[tuple[bytes, bytes]] | None = None,
        scope: typing.MutableMapping[str, typing.Any] | None = None,
    ) -> None:
        self._list: list[tuple[bytes, bytes]] = []
        if headers is not None:
            assert raw is None, 'Cannot set both "headers" and "raw".'
            assert scope is None, 'Cannot set both "headers" and "scope".'
            self._list = [(name.lower().encode("latin-1"), val.encode("latin-1")) for name, val in headers.items()]
        elif raw is not None:
            assert scope is None, 'Cannot set both "raw" and "scope".'
            self._list = raw
        elif scope is not None:
            # scope["headers"] isn't necessarily a list
            # it might be a tuple or other iterable
            self._list = scope["headers"] = list(scope["headers"])

    @property
    def raw(self) -> list[tuple[bytes, bytes]]:
        return list(self._list)

    def keys(self) -> list[str]:  # type: ignore[override]
        return [name.decode("latin-1") for name, _ in self._list]

    def values(self) -> list[str]:  # type: ignore[override]
        return [val.decode("latin-1") for _, val in self._list]

    def items(self) -> list[tuple[str, str]]:  # type: ignore[override]
        return [(name.decode("latin-1"), val.decode("latin-1")) for name, val in self._list]

    def getlist(self, key: str) -> list[str]:
        """Return every value for *key* (case-insensitive), in order."""
        wanted = key.lower().encode("latin-1")
        return [val.decode("latin-1") for name, val in self._list if name == wanted]

    def mutablecopy(self) -> MutableHeaders:
        return MutableHeaders(raw=self._list[:])

    def __getitem__(self, key: str) -> str:
        wanted = key.lower().encode("latin-1")
        for name, val in self._list:
            if name == wanted:
                return val.decode("latin-1")
        raise KeyError(key)

    def __contains__(self, key: typing.Any) -> bool:
        wanted = key.lower().encode("latin-1")
        return any(name == wanted for name, _ in self._list)

    def __iter__(self) -> typing.Iterator[typing.Any]:
        return iter(self.keys())

    def __len__(self) -> int:
        return len(self._list)

    def __eq__(self, other: typing.Any) -> bool:
        return isinstance(other, Headers) and sorted(self._list) == sorted(other._list)

    def __repr__(self) -> str:
        class_name = self.__class__.__name__
        as_dict = dict(self.items())
        # Fall back to the raw list when duplicate names would be lost.
        if len(as_dict) == len(self):
            return f"{class_name}({as_dict!r})"
        return f"{class_name}(raw={self.raw!r})"
|
| 565 |
+
|
| 566 |
+
|
| 567 |
+
class MutableHeaders(Headers):
    """Mutable variant of `Headers`; mutations act on the shared raw list."""

    def __setitem__(self, key: str, value: str) -> None:
        """
        Set the header `key` to `value`, removing any duplicate entries.
        Retains insertion order.
        """
        set_key = key.lower().encode("latin-1")
        set_value = value.encode("latin-1")

        matches = [idx for idx, (name, _) in enumerate(self._list) if name == set_key]
        # Delete all but the first occurrence, back to front so the
        # remaining indexes stay valid.
        for idx in reversed(matches[1:]):
            del self._list[idx]
        if matches:
            self._list[matches[0]] = (set_key, set_value)
        else:
            self._list.append((set_key, set_value))

    def __delitem__(self, key: str) -> None:
        """
        Remove the header `key`.
        """
        del_key = key.lower().encode("latin-1")
        # Slice-assign so the underlying list object (possibly aliased by the
        # ASGI scope) is mutated in place.
        self._list[:] = [(name, val) for name, val in self._list if name != del_key]

    def __ior__(self, other: typing.Mapping[str, str]) -> MutableHeaders:
        if not isinstance(other, typing.Mapping):
            raise TypeError(f"Expected a mapping but got {other.__class__.__name__}")
        self.update(other)
        return self

    def __or__(self, other: typing.Mapping[str, str]) -> MutableHeaders:
        if not isinstance(other, typing.Mapping):
            raise TypeError(f"Expected a mapping but got {other.__class__.__name__}")
        new = self.mutablecopy()
        new.update(other)
        return new

    @property
    def raw(self) -> list[tuple[bytes, bytes]]:
        # Unlike the immutable base class, expose the live list.
        return self._list

    def setdefault(self, key: str, value: str) -> str:
        """
        If the header `key` does not exist, then set it to `value`.
        Returns the header value.
        """
        wanted = key.lower().encode("latin-1")
        for name, existing in self._list:
            if name == wanted:
                return existing.decode("latin-1")
        self._list.append((wanted, value.encode("latin-1")))
        return value

    def update(self, other: typing.Mapping[str, str]) -> None:
        for name, val in other.items():
            self[name] = val

    def append(self, key: str, value: str) -> None:
        """
        Append a header, preserving any duplicate entries.
        """
        self._list.append((key.lower().encode("latin-1"), value.encode("latin-1")))

    def add_vary_header(self, vary: str) -> None:
        """Append *vary* to any existing Vary header, comma separated."""
        current = self.get("vary")
        self["vary"] = vary if current is None else f"{current}, {vary}"
|
| 652 |
+
|
| 653 |
+
|
| 654 |
+
class State:
    """
    An object that can be used to store arbitrary state.

    Used for `request.state` and `app.state`.
    """

    _state: dict[str, typing.Any]

    def __init__(self, state: dict[str, typing.Any] | None = None):
        # Bypass our own __setattr__ so the backing dict itself is stored on
        # the instance.  A caller-supplied dict is kept by reference.
        super().__setattr__("_state", {} if state is None else state)

    def __setattr__(self, key: typing.Any, value: typing.Any) -> None:
        self._state[key] = value

    def __getattr__(self, key: typing.Any) -> typing.Any:
        try:
            return self._state[key]
        except KeyError:
            raise AttributeError("'{}' object has no attribute '{}'".format(self.__class__.__name__, key))

    def __delattr__(self, key: typing.Any) -> None:
        del self._state[key]
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/formparsers.py
ADDED
|
@@ -0,0 +1,271 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import typing
|
| 4 |
+
from dataclasses import dataclass, field
|
| 5 |
+
from enum import Enum
|
| 6 |
+
from tempfile import SpooledTemporaryFile
|
| 7 |
+
from urllib.parse import unquote_plus
|
| 8 |
+
|
| 9 |
+
from starlette.datastructures import FormData, Headers, UploadFile
|
| 10 |
+
|
| 11 |
+
if typing.TYPE_CHECKING:
|
| 12 |
+
import multipart
|
| 13 |
+
from multipart.multipart import MultipartCallbacks, QuerystringCallbacks, parse_options_header
|
| 14 |
+
else:
|
| 15 |
+
try:
|
| 16 |
+
try:
|
| 17 |
+
import python_multipart as multipart
|
| 18 |
+
from python_multipart.multipart import parse_options_header
|
| 19 |
+
except ModuleNotFoundError: # pragma: no cover
|
| 20 |
+
import multipart
|
| 21 |
+
from multipart.multipart import parse_options_header
|
| 22 |
+
except ModuleNotFoundError: # pragma: no cover
|
| 23 |
+
multipart = None
|
| 24 |
+
parse_options_header = None
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class FormMessage(Enum):
    """Event types emitted while draining the urlencoded-form parser."""

    FIELD_START = 1
    FIELD_NAME = 2
    FIELD_DATA = 3
    FIELD_END = 4
    END = 5
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
@dataclass
class MultipartPart:
    """Accumulator for one part of a multipart body while it is being parsed."""

    content_disposition: bytes | None = None  # raw Content-Disposition header value
    field_name: str = ""  # decoded form-field name
    data: bytearray = field(default_factory=bytearray)  # in-memory payload (non-file parts)
    file: UploadFile | None = None  # set once the part is known to be a file upload
    item_headers: list[tuple[bytes, bytes]] = field(default_factory=list)  # raw per-part headers
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def _user_safe_decode(src: bytes | bytearray, codec: str) -> str:
|
| 45 |
+
try:
|
| 46 |
+
return src.decode(codec)
|
| 47 |
+
except (UnicodeDecodeError, LookupError):
|
| 48 |
+
return src.decode("latin-1")
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
class MultiPartException(Exception):
    """Raised when a multipart request is malformed or exceeds configured limits."""

    def __init__(self, message: str) -> None:
        # Pass the message to Exception so str(exc) and tracebacks show it;
        # previously args was left empty, making str(exc) == "".
        super().__init__(message)
        # Kept as an attribute for backwards compatibility with existing callers.
        self.message = message
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
class FormParser:
    """Parser for ``application/x-www-form-urlencoded`` request bodies."""

    def __init__(self, headers: Headers, stream: typing.AsyncGenerator[bytes, None]) -> None:
        assert multipart is not None, "The `python-multipart` library must be installed to use form parsing."
        self.headers = headers
        self.stream = stream
        self.messages: list[tuple[FormMessage, bytes]] = []

    # --- python-multipart callbacks: each just records an event --------------

    def on_field_start(self) -> None:
        self.messages.append((FormMessage.FIELD_START, b""))

    def on_field_name(self, data: bytes, start: int, end: int) -> None:
        self.messages.append((FormMessage.FIELD_NAME, data[start:end]))

    def on_field_data(self, data: bytes, start: int, end: int) -> None:
        self.messages.append((FormMessage.FIELD_DATA, data[start:end]))

    def on_field_end(self) -> None:
        self.messages.append((FormMessage.FIELD_END, b""))

    def on_end(self) -> None:
        self.messages.append((FormMessage.END, b""))

    async def parse(self) -> FormData:
        """Consume the request stream and return the decoded form fields."""
        callbacks: QuerystringCallbacks = {
            "on_field_start": self.on_field_start,
            "on_field_name": self.on_field_name,
            "on_field_data": self.on_field_data,
            "on_field_end": self.on_field_end,
            "on_end": self.on_end,
        }

        parser = multipart.QuerystringParser(callbacks)
        name_buf = b""
        data_buf = b""

        items: list[tuple[str, str | UploadFile]] = []

        async for chunk in self.stream:
            # An empty chunk signals end-of-stream.
            if chunk:
                parser.write(chunk)
            else:
                parser.finalize()
            pending = list(self.messages)
            self.messages.clear()
            for kind, payload in pending:
                if kind == FormMessage.FIELD_START:
                    name_buf = b""
                    data_buf = b""
                elif kind == FormMessage.FIELD_NAME:
                    name_buf += payload
                elif kind == FormMessage.FIELD_DATA:
                    data_buf += payload
                elif kind == FormMessage.FIELD_END:
                    items.append(
                        (
                            unquote_plus(name_buf.decode("latin-1")),
                            unquote_plus(data_buf.decode("latin-1")),
                        )
                    )

        return FormData(items)
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
class MultiPartParser:
|
| 125 |
+
max_file_size = 1024 * 1024 # 1MB
|
| 126 |
+
max_part_size = 1024 * 1024 # 1MB
|
| 127 |
+
|
| 128 |
+
def __init__(
    self,
    headers: Headers,
    stream: typing.AsyncGenerator[bytes, None],
    *,
    max_files: int | float = 1000,
    max_fields: int | float = 1000,
) -> None:
    """Set up parser state and the per-request file/field limits."""
    assert multipart is not None, "The `python-multipart` library must be installed to use form parsing."
    self.headers = headers
    self.stream = stream
    self.max_files = max_files
    self.max_fields = max_fields
    # Decoded (name, value-or-file) pairs, in the order they appeared.
    self.items: list[tuple[str, str | UploadFile]] = []
    # Running counters checked against the limits above.
    self._current_files = 0
    self._current_fields = 0
    # Header name/value fragments are accumulated across parser callbacks.
    self._current_partial_header_name: bytes = b""
    self._current_partial_header_value: bytes = b""
    self._current_part = MultipartPart()
    self._charset = ""
    # File payloads are queued here and flushed asynchronously in parse().
    self._file_parts_to_write: list[tuple[MultipartPart, bytes]] = []
    self._file_parts_to_finish: list[MultipartPart] = []
    self._files_to_close_on_error: list[SpooledTemporaryFile[bytes]] = []
|
| 151 |
+
|
| 152 |
+
def on_part_begin(self) -> None:
    """Start accumulating a fresh part."""
    self._current_part = MultipartPart()
|
| 154 |
+
|
| 155 |
+
def on_part_data(self, data: bytes, start: int, end: int) -> None:
    """Buffer part payload: small fields in memory, file parts queued for async write."""
    payload = data[start:end]
    part = self._current_part
    if part.file is None:
        if len(part.data) + len(payload) > self.max_part_size:
            raise MultiPartException(f"Part exceeded maximum size of {int(self.max_part_size / 1024)}KB.")
        part.data.extend(payload)
    else:
        self._file_parts_to_write.append((part, payload))
|
| 163 |
+
|
| 164 |
+
def on_part_end(self) -> None:
    """Finalise the current part into `self.items`."""
    part = self._current_part
    if part.file is None:
        self.items.append((part.field_name, _user_safe_decode(part.data, self._charset)))
    else:
        self._file_parts_to_finish.append(part)
        # The file can be added to the items right now even though it's not
        # finished yet, because it will be finished in the `parse()` method, before
        # self.items is used in the return value.
        self.items.append((part.field_name, part.file))
|
| 178 |
+
|
| 179 |
+
def on_header_field(self, data: bytes, start: int, end: int) -> None:
    """Accumulate a (possibly split) header name fragment."""
    self._current_partial_header_name += data[start:end]
|
| 181 |
+
|
| 182 |
+
def on_header_value(self, data: bytes, start: int, end: int) -> None:
    """Accumulate a (possibly split) header value fragment."""
    self._current_partial_header_value += data[start:end]
|
| 184 |
+
|
| 185 |
+
def on_header_end(self) -> None:
    """Commit the accumulated header to the current part and reset the buffers."""
    name = self._current_partial_header_name.lower()
    if name == b"content-disposition":
        # Remembered separately so on_headers_finished can parse it.
        self._current_part.content_disposition = self._current_partial_header_value
    self._current_part.item_headers.append((name, self._current_partial_header_value))
    self._current_partial_header_name = b""
    self._current_partial_header_value = b""
|
| 192 |
+
|
| 193 |
+
def on_headers_finished(self) -> None:
    """Decide whether the part is a plain field or a file upload.

    Parses the Content-Disposition header; a ``filename`` option marks the
    part as a file and allocates a spooled temporary file for its payload.
    Enforces the configured file/field count limits.
    """
    disposition, options = parse_options_header(self._current_part.content_disposition)
    try:
        self._current_part.field_name = _user_safe_decode(options[b"name"], self._charset)
    except KeyError:
        raise MultiPartException('The Content-Disposition header field "name" must be provided.')
    if b"filename" in options:
        self._current_files += 1
        if self._current_files > self.max_files:
            raise MultiPartException(f"Too many files. Maximum number of files is {self.max_files}.")
        filename = _user_safe_decode(options[b"filename"], self._charset)
        tempfile = SpooledTemporaryFile(max_size=self.max_file_size)
        # Track the handle so it can be closed if parsing fails part-way.
        self._files_to_close_on_error.append(tempfile)
        self._current_part.file = UploadFile(
            file=tempfile,  # type: ignore[arg-type]
            size=0,
            filename=filename,
            headers=Headers(raw=self._current_part.item_headers),
        )
    else:
        self._current_fields += 1
        if self._current_fields > self.max_fields:
            raise MultiPartException(f"Too many fields. Maximum number of fields is {self.max_fields}.")
        self._current_part.file = None
|
| 217 |
+
|
| 218 |
+
def on_end(self) -> None:
|
| 219 |
+
pass
|
| 220 |
+
|
| 221 |
+
async def parse(self) -> FormData:
|
| 222 |
+
# Parse the Content-Type header to get the multipart boundary.
|
| 223 |
+
_, params = parse_options_header(self.headers["Content-Type"])
|
| 224 |
+
charset = params.get(b"charset", "utf-8")
|
| 225 |
+
if isinstance(charset, bytes):
|
| 226 |
+
charset = charset.decode("latin-1")
|
| 227 |
+
self._charset = charset
|
| 228 |
+
try:
|
| 229 |
+
boundary = params[b"boundary"]
|
| 230 |
+
except KeyError:
|
| 231 |
+
raise MultiPartException("Missing boundary in multipart.")
|
| 232 |
+
|
| 233 |
+
# Callbacks dictionary.
|
| 234 |
+
callbacks: MultipartCallbacks = {
|
| 235 |
+
"on_part_begin": self.on_part_begin,
|
| 236 |
+
"on_part_data": self.on_part_data,
|
| 237 |
+
"on_part_end": self.on_part_end,
|
| 238 |
+
"on_header_field": self.on_header_field,
|
| 239 |
+
"on_header_value": self.on_header_value,
|
| 240 |
+
"on_header_end": self.on_header_end,
|
| 241 |
+
"on_headers_finished": self.on_headers_finished,
|
| 242 |
+
"on_end": self.on_end,
|
| 243 |
+
}
|
| 244 |
+
|
| 245 |
+
# Create the parser.
|
| 246 |
+
parser = multipart.MultipartParser(boundary, callbacks)
|
| 247 |
+
try:
|
| 248 |
+
# Feed the parser with data from the request.
|
| 249 |
+
async for chunk in self.stream:
|
| 250 |
+
parser.write(chunk)
|
| 251 |
+
# Write file data, it needs to use await with the UploadFile methods
|
| 252 |
+
# that call the corresponding file methods *in a threadpool*,
|
| 253 |
+
# otherwise, if they were called directly in the callback methods above
|
| 254 |
+
# (regular, non-async functions), that would block the event loop in
|
| 255 |
+
# the main thread.
|
| 256 |
+
for part, data in self._file_parts_to_write:
|
| 257 |
+
assert part.file # for type checkers
|
| 258 |
+
await part.file.write(data)
|
| 259 |
+
for part in self._file_parts_to_finish:
|
| 260 |
+
assert part.file # for type checkers
|
| 261 |
+
await part.file.seek(0)
|
| 262 |
+
self._file_parts_to_write.clear()
|
| 263 |
+
self._file_parts_to_finish.clear()
|
| 264 |
+
except MultiPartException as exc:
|
| 265 |
+
# Close all the files if there was an error.
|
| 266 |
+
for file in self._files_to_close_on_error:
|
| 267 |
+
file.close()
|
| 268 |
+
raise exc
|
| 269 |
+
|
| 270 |
+
parser.finalize()
|
| 271 |
+
return FormData(self.items)
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/middleware/__init__.py
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
from typing import Any, Iterator, Protocol
|
| 5 |
+
|
| 6 |
+
if sys.version_info >= (3, 10): # pragma: no cover
|
| 7 |
+
from typing import ParamSpec
|
| 8 |
+
else: # pragma: no cover
|
| 9 |
+
from typing_extensions import ParamSpec
|
| 10 |
+
|
| 11 |
+
from starlette.types import ASGIApp
|
| 12 |
+
|
| 13 |
+
P = ParamSpec("P")
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class _MiddlewareFactory(Protocol[P]):
|
| 17 |
+
def __call__(self, app: ASGIApp, *args: P.args, **kwargs: P.kwargs) -> ASGIApp: ... # pragma: no cover
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class Middleware:
|
| 21 |
+
def __init__(
|
| 22 |
+
self,
|
| 23 |
+
cls: _MiddlewareFactory[P],
|
| 24 |
+
*args: P.args,
|
| 25 |
+
**kwargs: P.kwargs,
|
| 26 |
+
) -> None:
|
| 27 |
+
self.cls = cls
|
| 28 |
+
self.args = args
|
| 29 |
+
self.kwargs = kwargs
|
| 30 |
+
|
| 31 |
+
def __iter__(self) -> Iterator[Any]:
|
| 32 |
+
as_tuple = (self.cls, self.args, self.kwargs)
|
| 33 |
+
return iter(as_tuple)
|
| 34 |
+
|
| 35 |
+
def __repr__(self) -> str:
|
| 36 |
+
class_name = self.__class__.__name__
|
| 37 |
+
args_strings = [f"{value!r}" for value in self.args]
|
| 38 |
+
option_strings = [f"{key}={value!r}" for key, value in self.kwargs.items()]
|
| 39 |
+
name = getattr(self.cls, "__name__", "")
|
| 40 |
+
args_repr = ", ".join([name] + args_strings + option_strings)
|
| 41 |
+
return f"{class_name}({args_repr})"
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/middleware/cors.py
ADDED
|
@@ -0,0 +1,172 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import functools
|
| 4 |
+
import re
|
| 5 |
+
import typing
|
| 6 |
+
|
| 7 |
+
from starlette.datastructures import Headers, MutableHeaders
|
| 8 |
+
from starlette.responses import PlainTextResponse, Response
|
| 9 |
+
from starlette.types import ASGIApp, Message, Receive, Scope, Send
|
| 10 |
+
|
| 11 |
+
ALL_METHODS = ("DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT")
|
| 12 |
+
SAFELISTED_HEADERS = {"Accept", "Accept-Language", "Content-Language", "Content-Type"}
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class CORSMiddleware:
|
| 16 |
+
def __init__(
|
| 17 |
+
self,
|
| 18 |
+
app: ASGIApp,
|
| 19 |
+
allow_origins: typing.Sequence[str] = (),
|
| 20 |
+
allow_methods: typing.Sequence[str] = ("GET",),
|
| 21 |
+
allow_headers: typing.Sequence[str] = (),
|
| 22 |
+
allow_credentials: bool = False,
|
| 23 |
+
allow_origin_regex: str | None = None,
|
| 24 |
+
expose_headers: typing.Sequence[str] = (),
|
| 25 |
+
max_age: int = 600,
|
| 26 |
+
) -> None:
|
| 27 |
+
if "*" in allow_methods:
|
| 28 |
+
allow_methods = ALL_METHODS
|
| 29 |
+
|
| 30 |
+
compiled_allow_origin_regex = None
|
| 31 |
+
if allow_origin_regex is not None:
|
| 32 |
+
compiled_allow_origin_regex = re.compile(allow_origin_regex)
|
| 33 |
+
|
| 34 |
+
allow_all_origins = "*" in allow_origins
|
| 35 |
+
allow_all_headers = "*" in allow_headers
|
| 36 |
+
preflight_explicit_allow_origin = not allow_all_origins or allow_credentials
|
| 37 |
+
|
| 38 |
+
simple_headers = {}
|
| 39 |
+
if allow_all_origins:
|
| 40 |
+
simple_headers["Access-Control-Allow-Origin"] = "*"
|
| 41 |
+
if allow_credentials:
|
| 42 |
+
simple_headers["Access-Control-Allow-Credentials"] = "true"
|
| 43 |
+
if expose_headers:
|
| 44 |
+
simple_headers["Access-Control-Expose-Headers"] = ", ".join(expose_headers)
|
| 45 |
+
|
| 46 |
+
preflight_headers = {}
|
| 47 |
+
if preflight_explicit_allow_origin:
|
| 48 |
+
# The origin value will be set in preflight_response() if it is allowed.
|
| 49 |
+
preflight_headers["Vary"] = "Origin"
|
| 50 |
+
else:
|
| 51 |
+
preflight_headers["Access-Control-Allow-Origin"] = "*"
|
| 52 |
+
preflight_headers.update(
|
| 53 |
+
{
|
| 54 |
+
"Access-Control-Allow-Methods": ", ".join(allow_methods),
|
| 55 |
+
"Access-Control-Max-Age": str(max_age),
|
| 56 |
+
}
|
| 57 |
+
)
|
| 58 |
+
allow_headers = sorted(SAFELISTED_HEADERS | set(allow_headers))
|
| 59 |
+
if allow_headers and not allow_all_headers:
|
| 60 |
+
preflight_headers["Access-Control-Allow-Headers"] = ", ".join(allow_headers)
|
| 61 |
+
if allow_credentials:
|
| 62 |
+
preflight_headers["Access-Control-Allow-Credentials"] = "true"
|
| 63 |
+
|
| 64 |
+
self.app = app
|
| 65 |
+
self.allow_origins = allow_origins
|
| 66 |
+
self.allow_methods = allow_methods
|
| 67 |
+
self.allow_headers = [h.lower() for h in allow_headers]
|
| 68 |
+
self.allow_all_origins = allow_all_origins
|
| 69 |
+
self.allow_all_headers = allow_all_headers
|
| 70 |
+
self.preflight_explicit_allow_origin = preflight_explicit_allow_origin
|
| 71 |
+
self.allow_origin_regex = compiled_allow_origin_regex
|
| 72 |
+
self.simple_headers = simple_headers
|
| 73 |
+
self.preflight_headers = preflight_headers
|
| 74 |
+
|
| 75 |
+
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
|
| 76 |
+
if scope["type"] != "http": # pragma: no cover
|
| 77 |
+
await self.app(scope, receive, send)
|
| 78 |
+
return
|
| 79 |
+
|
| 80 |
+
method = scope["method"]
|
| 81 |
+
headers = Headers(scope=scope)
|
| 82 |
+
origin = headers.get("origin")
|
| 83 |
+
|
| 84 |
+
if origin is None:
|
| 85 |
+
await self.app(scope, receive, send)
|
| 86 |
+
return
|
| 87 |
+
|
| 88 |
+
if method == "OPTIONS" and "access-control-request-method" in headers:
|
| 89 |
+
response = self.preflight_response(request_headers=headers)
|
| 90 |
+
await response(scope, receive, send)
|
| 91 |
+
return
|
| 92 |
+
|
| 93 |
+
await self.simple_response(scope, receive, send, request_headers=headers)
|
| 94 |
+
|
| 95 |
+
def is_allowed_origin(self, origin: str) -> bool:
|
| 96 |
+
if self.allow_all_origins:
|
| 97 |
+
return True
|
| 98 |
+
|
| 99 |
+
if self.allow_origin_regex is not None and self.allow_origin_regex.fullmatch(origin):
|
| 100 |
+
return True
|
| 101 |
+
|
| 102 |
+
return origin in self.allow_origins
|
| 103 |
+
|
| 104 |
+
def preflight_response(self, request_headers: Headers) -> Response:
|
| 105 |
+
requested_origin = request_headers["origin"]
|
| 106 |
+
requested_method = request_headers["access-control-request-method"]
|
| 107 |
+
requested_headers = request_headers.get("access-control-request-headers")
|
| 108 |
+
|
| 109 |
+
headers = dict(self.preflight_headers)
|
| 110 |
+
failures = []
|
| 111 |
+
|
| 112 |
+
if self.is_allowed_origin(origin=requested_origin):
|
| 113 |
+
if self.preflight_explicit_allow_origin:
|
| 114 |
+
# The "else" case is already accounted for in self.preflight_headers
|
| 115 |
+
# and the value would be "*".
|
| 116 |
+
headers["Access-Control-Allow-Origin"] = requested_origin
|
| 117 |
+
else:
|
| 118 |
+
failures.append("origin")
|
| 119 |
+
|
| 120 |
+
if requested_method not in self.allow_methods:
|
| 121 |
+
failures.append("method")
|
| 122 |
+
|
| 123 |
+
# If we allow all headers, then we have to mirror back any requested
|
| 124 |
+
# headers in the response.
|
| 125 |
+
if self.allow_all_headers and requested_headers is not None:
|
| 126 |
+
headers["Access-Control-Allow-Headers"] = requested_headers
|
| 127 |
+
elif requested_headers is not None:
|
| 128 |
+
for header in [h.lower() for h in requested_headers.split(",")]:
|
| 129 |
+
if header.strip() not in self.allow_headers:
|
| 130 |
+
failures.append("headers")
|
| 131 |
+
break
|
| 132 |
+
|
| 133 |
+
# We don't strictly need to use 400 responses here, since its up to
|
| 134 |
+
# the browser to enforce the CORS policy, but its more informative
|
| 135 |
+
# if we do.
|
| 136 |
+
if failures:
|
| 137 |
+
failure_text = "Disallowed CORS " + ", ".join(failures)
|
| 138 |
+
return PlainTextResponse(failure_text, status_code=400, headers=headers)
|
| 139 |
+
|
| 140 |
+
return PlainTextResponse("OK", status_code=200, headers=headers)
|
| 141 |
+
|
| 142 |
+
async def simple_response(self, scope: Scope, receive: Receive, send: Send, request_headers: Headers) -> None:
|
| 143 |
+
send = functools.partial(self.send, send=send, request_headers=request_headers)
|
| 144 |
+
await self.app(scope, receive, send)
|
| 145 |
+
|
| 146 |
+
async def send(self, message: Message, send: Send, request_headers: Headers) -> None:
|
| 147 |
+
if message["type"] != "http.response.start":
|
| 148 |
+
await send(message)
|
| 149 |
+
return
|
| 150 |
+
|
| 151 |
+
message.setdefault("headers", [])
|
| 152 |
+
headers = MutableHeaders(scope=message)
|
| 153 |
+
headers.update(self.simple_headers)
|
| 154 |
+
origin = request_headers["Origin"]
|
| 155 |
+
has_cookie = "cookie" in request_headers
|
| 156 |
+
|
| 157 |
+
# If request includes any cookie headers, then we must respond
|
| 158 |
+
# with the specific origin instead of '*'.
|
| 159 |
+
if self.allow_all_origins and has_cookie:
|
| 160 |
+
self.allow_explicit_origin(headers, origin)
|
| 161 |
+
|
| 162 |
+
# If we only allow specific origins, then we have to mirror back
|
| 163 |
+
# the Origin header in the response.
|
| 164 |
+
elif not self.allow_all_origins and self.is_allowed_origin(origin=origin):
|
| 165 |
+
self.allow_explicit_origin(headers, origin)
|
| 166 |
+
|
| 167 |
+
await send(message)
|
| 168 |
+
|
| 169 |
+
@staticmethod
|
| 170 |
+
def allow_explicit_origin(headers: MutableHeaders, origin: str) -> None:
|
| 171 |
+
headers["Access-Control-Allow-Origin"] = origin
|
| 172 |
+
headers.add_vary_header("Origin")
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/middleware/exceptions.py
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import typing
|
| 4 |
+
|
| 5 |
+
from starlette._exception_handler import (
|
| 6 |
+
ExceptionHandlers,
|
| 7 |
+
StatusHandlers,
|
| 8 |
+
wrap_app_handling_exceptions,
|
| 9 |
+
)
|
| 10 |
+
from starlette.exceptions import HTTPException, WebSocketException
|
| 11 |
+
from starlette.requests import Request
|
| 12 |
+
from starlette.responses import PlainTextResponse, Response
|
| 13 |
+
from starlette.types import ASGIApp, Receive, Scope, Send
|
| 14 |
+
from starlette.websockets import WebSocket
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class ExceptionMiddleware:
|
| 18 |
+
def __init__(
|
| 19 |
+
self,
|
| 20 |
+
app: ASGIApp,
|
| 21 |
+
handlers: typing.Mapping[typing.Any, typing.Callable[[Request, Exception], Response]] | None = None,
|
| 22 |
+
debug: bool = False,
|
| 23 |
+
) -> None:
|
| 24 |
+
self.app = app
|
| 25 |
+
self.debug = debug # TODO: We ought to handle 404 cases if debug is set.
|
| 26 |
+
self._status_handlers: StatusHandlers = {}
|
| 27 |
+
self._exception_handlers: ExceptionHandlers = {
|
| 28 |
+
HTTPException: self.http_exception,
|
| 29 |
+
WebSocketException: self.websocket_exception,
|
| 30 |
+
}
|
| 31 |
+
if handlers is not None:
|
| 32 |
+
for key, value in handlers.items():
|
| 33 |
+
self.add_exception_handler(key, value)
|
| 34 |
+
|
| 35 |
+
def add_exception_handler(
|
| 36 |
+
self,
|
| 37 |
+
exc_class_or_status_code: int | type[Exception],
|
| 38 |
+
handler: typing.Callable[[Request, Exception], Response],
|
| 39 |
+
) -> None:
|
| 40 |
+
if isinstance(exc_class_or_status_code, int):
|
| 41 |
+
self._status_handlers[exc_class_or_status_code] = handler
|
| 42 |
+
else:
|
| 43 |
+
assert issubclass(exc_class_or_status_code, Exception)
|
| 44 |
+
self._exception_handlers[exc_class_or_status_code] = handler
|
| 45 |
+
|
| 46 |
+
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
|
| 47 |
+
if scope["type"] not in ("http", "websocket"):
|
| 48 |
+
await self.app(scope, receive, send)
|
| 49 |
+
return
|
| 50 |
+
|
| 51 |
+
scope["starlette.exception_handlers"] = (
|
| 52 |
+
self._exception_handlers,
|
| 53 |
+
self._status_handlers,
|
| 54 |
+
)
|
| 55 |
+
|
| 56 |
+
conn: Request | WebSocket
|
| 57 |
+
if scope["type"] == "http":
|
| 58 |
+
conn = Request(scope, receive, send)
|
| 59 |
+
else:
|
| 60 |
+
conn = WebSocket(scope, receive, send)
|
| 61 |
+
|
| 62 |
+
await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
|
| 63 |
+
|
| 64 |
+
def http_exception(self, request: Request, exc: Exception) -> Response:
|
| 65 |
+
assert isinstance(exc, HTTPException)
|
| 66 |
+
if exc.status_code in {204, 304}:
|
| 67 |
+
return Response(status_code=exc.status_code, headers=exc.headers)
|
| 68 |
+
return PlainTextResponse(exc.detail, status_code=exc.status_code, headers=exc.headers)
|
| 69 |
+
|
| 70 |
+
async def websocket_exception(self, websocket: WebSocket, exc: Exception) -> None:
|
| 71 |
+
assert isinstance(exc, WebSocketException)
|
| 72 |
+
await websocket.close(code=exc.code, reason=exc.reason) # pragma: no cover
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/middleware/httpsredirect.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from starlette.datastructures import URL
|
| 2 |
+
from starlette.responses import RedirectResponse
|
| 3 |
+
from starlette.types import ASGIApp, Receive, Scope, Send
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class HTTPSRedirectMiddleware:
|
| 7 |
+
def __init__(self, app: ASGIApp) -> None:
|
| 8 |
+
self.app = app
|
| 9 |
+
|
| 10 |
+
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
|
| 11 |
+
if scope["type"] in ("http", "websocket") and scope["scheme"] in ("http", "ws"):
|
| 12 |
+
url = URL(scope=scope)
|
| 13 |
+
redirect_scheme = {"http": "https", "ws": "wss"}[url.scheme]
|
| 14 |
+
netloc = url.hostname if url.port in (80, 443) else url.netloc
|
| 15 |
+
url = url.replace(scheme=redirect_scheme, netloc=netloc)
|
| 16 |
+
response = RedirectResponse(url, status_code=307)
|
| 17 |
+
await response(scope, receive, send)
|
| 18 |
+
else:
|
| 19 |
+
await self.app(scope, receive, send)
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/middleware/trustedhost.py
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import typing
|
| 4 |
+
|
| 5 |
+
from starlette.datastructures import URL, Headers
|
| 6 |
+
from starlette.responses import PlainTextResponse, RedirectResponse, Response
|
| 7 |
+
from starlette.types import ASGIApp, Receive, Scope, Send
|
| 8 |
+
|
| 9 |
+
ENFORCE_DOMAIN_WILDCARD = "Domain wildcard patterns must be like '*.example.com'."
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class TrustedHostMiddleware:
|
| 13 |
+
def __init__(
|
| 14 |
+
self,
|
| 15 |
+
app: ASGIApp,
|
| 16 |
+
allowed_hosts: typing.Sequence[str] | None = None,
|
| 17 |
+
www_redirect: bool = True,
|
| 18 |
+
) -> None:
|
| 19 |
+
if allowed_hosts is None:
|
| 20 |
+
allowed_hosts = ["*"]
|
| 21 |
+
|
| 22 |
+
for pattern in allowed_hosts:
|
| 23 |
+
assert "*" not in pattern[1:], ENFORCE_DOMAIN_WILDCARD
|
| 24 |
+
if pattern.startswith("*") and pattern != "*":
|
| 25 |
+
assert pattern.startswith("*."), ENFORCE_DOMAIN_WILDCARD
|
| 26 |
+
self.app = app
|
| 27 |
+
self.allowed_hosts = list(allowed_hosts)
|
| 28 |
+
self.allow_any = "*" in allowed_hosts
|
| 29 |
+
self.www_redirect = www_redirect
|
| 30 |
+
|
| 31 |
+
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
|
| 32 |
+
if self.allow_any or scope["type"] not in (
|
| 33 |
+
"http",
|
| 34 |
+
"websocket",
|
| 35 |
+
): # pragma: no cover
|
| 36 |
+
await self.app(scope, receive, send)
|
| 37 |
+
return
|
| 38 |
+
|
| 39 |
+
headers = Headers(scope=scope)
|
| 40 |
+
host = headers.get("host", "").split(":")[0]
|
| 41 |
+
is_valid_host = False
|
| 42 |
+
found_www_redirect = False
|
| 43 |
+
for pattern in self.allowed_hosts:
|
| 44 |
+
if host == pattern or (pattern.startswith("*") and host.endswith(pattern[1:])):
|
| 45 |
+
is_valid_host = True
|
| 46 |
+
break
|
| 47 |
+
elif "www." + host == pattern:
|
| 48 |
+
found_www_redirect = True
|
| 49 |
+
|
| 50 |
+
if is_valid_host:
|
| 51 |
+
await self.app(scope, receive, send)
|
| 52 |
+
else:
|
| 53 |
+
response: Response
|
| 54 |
+
if found_www_redirect and self.www_redirect:
|
| 55 |
+
url = URL(scope=scope)
|
| 56 |
+
redirect_url = url.replace(netloc="www." + url.netloc)
|
| 57 |
+
response = RedirectResponse(url=str(redirect_url))
|
| 58 |
+
else:
|
| 59 |
+
response = PlainTextResponse("Invalid host header", status_code=400)
|
| 60 |
+
await response(scope, receive, send)
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/middleware/wsgi.py
ADDED
|
@@ -0,0 +1,152 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import io
|
| 4 |
+
import math
|
| 5 |
+
import sys
|
| 6 |
+
import typing
|
| 7 |
+
import warnings
|
| 8 |
+
|
| 9 |
+
import anyio
|
| 10 |
+
from anyio.abc import ObjectReceiveStream, ObjectSendStream
|
| 11 |
+
|
| 12 |
+
from starlette.types import Receive, Scope, Send
|
| 13 |
+
|
| 14 |
+
warnings.warn(
|
| 15 |
+
"starlette.middleware.wsgi is deprecated and will be removed in a future release. "
|
| 16 |
+
"Please refer to https://github.com/abersheeran/a2wsgi as a replacement.",
|
| 17 |
+
DeprecationWarning,
|
| 18 |
+
)
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def build_environ(scope: Scope, body: bytes) -> dict[str, typing.Any]:
|
| 22 |
+
"""
|
| 23 |
+
Builds a scope and request body into a WSGI environ object.
|
| 24 |
+
"""
|
| 25 |
+
|
| 26 |
+
script_name = scope.get("root_path", "").encode("utf8").decode("latin1")
|
| 27 |
+
path_info = scope["path"].encode("utf8").decode("latin1")
|
| 28 |
+
if path_info.startswith(script_name):
|
| 29 |
+
path_info = path_info[len(script_name) :]
|
| 30 |
+
|
| 31 |
+
environ = {
|
| 32 |
+
"REQUEST_METHOD": scope["method"],
|
| 33 |
+
"SCRIPT_NAME": script_name,
|
| 34 |
+
"PATH_INFO": path_info,
|
| 35 |
+
"QUERY_STRING": scope["query_string"].decode("ascii"),
|
| 36 |
+
"SERVER_PROTOCOL": f"HTTP/{scope['http_version']}",
|
| 37 |
+
"wsgi.version": (1, 0),
|
| 38 |
+
"wsgi.url_scheme": scope.get("scheme", "http"),
|
| 39 |
+
"wsgi.input": io.BytesIO(body),
|
| 40 |
+
"wsgi.errors": sys.stdout,
|
| 41 |
+
"wsgi.multithread": True,
|
| 42 |
+
"wsgi.multiprocess": True,
|
| 43 |
+
"wsgi.run_once": False,
|
| 44 |
+
}
|
| 45 |
+
|
| 46 |
+
# Get server name and port - required in WSGI, not in ASGI
|
| 47 |
+
server = scope.get("server") or ("localhost", 80)
|
| 48 |
+
environ["SERVER_NAME"] = server[0]
|
| 49 |
+
environ["SERVER_PORT"] = server[1]
|
| 50 |
+
|
| 51 |
+
# Get client IP address
|
| 52 |
+
if scope.get("client"):
|
| 53 |
+
environ["REMOTE_ADDR"] = scope["client"][0]
|
| 54 |
+
|
| 55 |
+
# Go through headers and make them into environ entries
|
| 56 |
+
for name, value in scope.get("headers", []):
|
| 57 |
+
name = name.decode("latin1")
|
| 58 |
+
if name == "content-length":
|
| 59 |
+
corrected_name = "CONTENT_LENGTH"
|
| 60 |
+
elif name == "content-type":
|
| 61 |
+
corrected_name = "CONTENT_TYPE"
|
| 62 |
+
else:
|
| 63 |
+
corrected_name = f"HTTP_{name}".upper().replace("-", "_")
|
| 64 |
+
# HTTPbis say only ASCII chars are allowed in headers, but we latin1 just in
|
| 65 |
+
# case
|
| 66 |
+
value = value.decode("latin1")
|
| 67 |
+
if corrected_name in environ:
|
| 68 |
+
value = environ[corrected_name] + "," + value
|
| 69 |
+
environ[corrected_name] = value
|
| 70 |
+
return environ
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
class WSGIMiddleware:
|
| 74 |
+
def __init__(self, app: typing.Callable[..., typing.Any]) -> None:
|
| 75 |
+
self.app = app
|
| 76 |
+
|
| 77 |
+
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
|
| 78 |
+
assert scope["type"] == "http"
|
| 79 |
+
responder = WSGIResponder(self.app, scope)
|
| 80 |
+
await responder(receive, send)
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
class WSGIResponder:
|
| 84 |
+
stream_send: ObjectSendStream[typing.MutableMapping[str, typing.Any]]
|
| 85 |
+
stream_receive: ObjectReceiveStream[typing.MutableMapping[str, typing.Any]]
|
| 86 |
+
|
| 87 |
+
def __init__(self, app: typing.Callable[..., typing.Any], scope: Scope) -> None:
|
| 88 |
+
self.app = app
|
| 89 |
+
self.scope = scope
|
| 90 |
+
self.status = None
|
| 91 |
+
self.response_headers = None
|
| 92 |
+
self.stream_send, self.stream_receive = anyio.create_memory_object_stream(math.inf)
|
| 93 |
+
self.response_started = False
|
| 94 |
+
self.exc_info: typing.Any = None
|
| 95 |
+
|
| 96 |
+
async def __call__(self, receive: Receive, send: Send) -> None:
|
| 97 |
+
body = b""
|
| 98 |
+
more_body = True
|
| 99 |
+
while more_body:
|
| 100 |
+
message = await receive()
|
| 101 |
+
body += message.get("body", b"")
|
| 102 |
+
more_body = message.get("more_body", False)
|
| 103 |
+
environ = build_environ(self.scope, body)
|
| 104 |
+
|
| 105 |
+
async with anyio.create_task_group() as task_group:
|
| 106 |
+
task_group.start_soon(self.sender, send)
|
| 107 |
+
async with self.stream_send:
|
| 108 |
+
await anyio.to_thread.run_sync(self.wsgi, environ, self.start_response)
|
| 109 |
+
if self.exc_info is not None:
|
| 110 |
+
raise self.exc_info[0].with_traceback(self.exc_info[1], self.exc_info[2])
|
| 111 |
+
|
| 112 |
+
async def sender(self, send: Send) -> None:
|
| 113 |
+
async with self.stream_receive:
|
| 114 |
+
async for message in self.stream_receive:
|
| 115 |
+
await send(message)
|
| 116 |
+
|
| 117 |
+
def start_response(
|
| 118 |
+
self,
|
| 119 |
+
status: str,
|
| 120 |
+
response_headers: list[tuple[str, str]],
|
| 121 |
+
exc_info: typing.Any = None,
|
| 122 |
+
) -> None:
|
| 123 |
+
self.exc_info = exc_info
|
| 124 |
+
if not self.response_started:
|
| 125 |
+
self.response_started = True
|
| 126 |
+
status_code_string, _ = status.split(" ", 1)
|
| 127 |
+
status_code = int(status_code_string)
|
| 128 |
+
headers = [
|
| 129 |
+
(name.strip().encode("ascii").lower(), value.strip().encode("ascii"))
|
| 130 |
+
for name, value in response_headers
|
| 131 |
+
]
|
| 132 |
+
anyio.from_thread.run(
|
| 133 |
+
self.stream_send.send,
|
| 134 |
+
{
|
| 135 |
+
"type": "http.response.start",
|
| 136 |
+
"status": status_code,
|
| 137 |
+
"headers": headers,
|
| 138 |
+
},
|
| 139 |
+
)
|
| 140 |
+
|
| 141 |
+
def wsgi(
|
| 142 |
+
self,
|
| 143 |
+
environ: dict[str, typing.Any],
|
| 144 |
+
start_response: typing.Callable[..., typing.Any],
|
| 145 |
+
) -> None:
|
| 146 |
+
for chunk in self.app(environ, start_response):
|
| 147 |
+
anyio.from_thread.run(
|
| 148 |
+
self.stream_send.send,
|
| 149 |
+
{"type": "http.response.body", "body": chunk, "more_body": True},
|
| 150 |
+
)
|
| 151 |
+
|
| 152 |
+
anyio.from_thread.run(self.stream_send.send, {"type": "http.response.body", "body": b""})
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/py.typed
ADDED
|
File without changes
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/responses.py
ADDED
|
@@ -0,0 +1,531 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import http.cookies
|
| 4 |
+
import json
|
| 5 |
+
import os
|
| 6 |
+
import re
|
| 7 |
+
import stat
|
| 8 |
+
import typing
|
| 9 |
+
import warnings
|
| 10 |
+
from datetime import datetime
|
| 11 |
+
from email.utils import format_datetime, formatdate
|
| 12 |
+
from functools import partial
|
| 13 |
+
from mimetypes import guess_type
|
| 14 |
+
from secrets import token_hex
|
| 15 |
+
from urllib.parse import quote
|
| 16 |
+
|
| 17 |
+
import anyio
|
| 18 |
+
import anyio.to_thread
|
| 19 |
+
|
| 20 |
+
from starlette._compat import md5_hexdigest
|
| 21 |
+
from starlette.background import BackgroundTask
|
| 22 |
+
from starlette.concurrency import iterate_in_threadpool
|
| 23 |
+
from starlette.datastructures import URL, Headers, MutableHeaders
|
| 24 |
+
from starlette.types import Receive, Scope, Send
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class Response:
    """Base ASGI response.

    Renders ``content`` to bytes once at construction time and replays the
    resulting status/headers/body on each ``__call__``.
    """

    # Subclasses override this (e.g. "application/json"); it is also settable
    # per-instance via the ``media_type`` constructor argument.
    media_type = None
    charset = "utf-8"

    def __init__(
        self,
        content: typing.Any = None,
        status_code: int = 200,
        headers: typing.Mapping[str, str] | None = None,
        media_type: str | None = None,
        background: BackgroundTask | None = None,
    ) -> None:
        self.status_code = status_code
        if media_type is not None:
            self.media_type = media_type
        self.background = background
        self.body = self.render(content)
        self.init_headers(headers)

    def render(self, content: typing.Any) -> bytes | memoryview:
        """Encode ``content`` to bytes; ``None`` becomes an empty body and
        bytes-like content passes through unchanged."""
        if content is None:
            return b""
        if isinstance(content, (bytes, memoryview)):
            return content
        return content.encode(self.charset)  # type: ignore

    def init_headers(self, headers: typing.Mapping[str, str] | None = None) -> None:
        """Build ``self.raw_headers`` from the caller-supplied mapping,
        auto-populating content-length and content-type unless the caller
        already provided them."""
        if headers is None:
            raw_headers: list[tuple[bytes, bytes]] = []
            populate_content_length = True
            populate_content_type = True
        else:
            # Header names are lower-cased; values encoded latin-1 per the ASGI spec.
            raw_headers = [(k.lower().encode("latin-1"), v.encode("latin-1")) for k, v in headers.items()]
            keys = [h[0] for h in raw_headers]
            populate_content_length = b"content-length" not in keys
            populate_content_type = b"content-type" not in keys

        # ``body`` may not exist yet for subclasses that call init_headers()
        # before rendering (e.g. streaming responses).
        body = getattr(self, "body", None)
        # 1xx, 204 and 304 responses must not carry a content-length header.
        if (
            body is not None
            and populate_content_length
            and not (self.status_code < 200 or self.status_code in (204, 304))
        ):
            content_length = str(len(body))
            raw_headers.append((b"content-length", content_length.encode("latin-1")))

        content_type = self.media_type
        if content_type is not None and populate_content_type:
            # text/* responses get the charset appended when not already present.
            if content_type.startswith("text/") and "charset=" not in content_type.lower():
                content_type += "; charset=" + self.charset
            raw_headers.append((b"content-type", content_type.encode("latin-1")))

        self.raw_headers = raw_headers

    @property
    def headers(self) -> MutableHeaders:
        # Lazily wrap raw_headers in a mutable, case-insensitive view; mutations
        # through the view are reflected in ``raw_headers``.
        if not hasattr(self, "_headers"):
            self._headers = MutableHeaders(raw=self.raw_headers)
        return self._headers

    def set_cookie(
        self,
        key: str,
        value: str = "",
        max_age: int | None = None,
        expires: datetime | str | int | None = None,
        path: str | None = "/",
        domain: str | None = None,
        secure: bool = False,
        httponly: bool = False,
        samesite: typing.Literal["lax", "strict", "none"] | None = "lax",
    ) -> None:
        """Append a ``set-cookie`` header built from the given attributes."""
        cookie: http.cookies.BaseCookie[str] = http.cookies.SimpleCookie()
        cookie[key] = value
        if max_age is not None:
            cookie[key]["max-age"] = max_age
        if expires is not None:
            if isinstance(expires, datetime):
                # datetime values are formatted as an RFC 1123 GMT date.
                cookie[key]["expires"] = format_datetime(expires, usegmt=True)
            else:
                cookie[key]["expires"] = expires
        if path is not None:
            cookie[key]["path"] = path
        if domain is not None:
            cookie[key]["domain"] = domain
        if secure:
            cookie[key]["secure"] = True
        if httponly:
            cookie[key]["httponly"] = True
        if samesite is not None:
            assert samesite.lower() in [
                "strict",
                "lax",
                "none",
            ], "samesite must be either 'strict', 'lax' or 'none'"
            cookie[key]["samesite"] = samesite
        cookie_val = cookie.output(header="").strip()
        self.raw_headers.append((b"set-cookie", cookie_val.encode("latin-1")))

    def delete_cookie(
        self,
        key: str,
        path: str = "/",
        domain: str | None = None,
        secure: bool = False,
        httponly: bool = False,
        samesite: typing.Literal["lax", "strict", "none"] | None = "lax",
    ) -> None:
        """Expire a cookie client-side by re-setting it with max-age/expires of 0."""
        self.set_cookie(
            key,
            max_age=0,
            expires=0,
            path=path,
            domain=domain,
            secure=secure,
            httponly=httponly,
            samesite=samesite,
        )

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        """ASGI entry point: emit response start, body, then run any background task."""
        # Inside a websocket scope the response messages are prefixed
        # (e.g. "websocket.http.response.start" for denial responses).
        prefix = "websocket." if scope["type"] == "websocket" else ""
        await send(
            {
                "type": prefix + "http.response.start",
                "status": self.status_code,
                "headers": self.raw_headers,
            }
        )
        await send({"type": prefix + "http.response.body", "body": self.body})

        # Background work runs only after the full response has been sent.
        if self.background is not None:
            await self.background()
|
| 159 |
+
|
| 160 |
+
|
| 161 |
+
class HTMLResponse(Response):
    """Response whose content is served as ``text/html`` (charset appended by the base class)."""

    media_type = "text/html"
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
class PlainTextResponse(Response):
    """Response whose content is served as ``text/plain`` (charset appended by the base class)."""

    media_type = "text/plain"
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
class JSONResponse(Response):
    """Response that serialises its content as compact JSON (``application/json``)."""

    media_type = "application/json"

    def __init__(
        self,
        content: typing.Any,
        status_code: int = 200,
        headers: typing.Mapping[str, str] | None = None,
        media_type: str | None = None,
        background: BackgroundTask | None = None,
    ) -> None:
        super().__init__(content, status_code, headers, media_type, background)

    def render(self, content: typing.Any) -> bytes:
        """Serialise ``content`` to UTF-8 JSON.

        Uses the most compact separators, keeps non-ASCII characters literal,
        and rejects NaN/Infinity (``allow_nan=False`` raises ``ValueError``).
        """
        serialized = json.dumps(
            content,
            ensure_ascii=False,
            allow_nan=False,
            indent=None,
            separators=(",", ":"),
        )
        return serialized.encode("utf-8")
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
class RedirectResponse(Response):
    """Response redirecting to ``url`` — 307 Temporary Redirect by default,
    which preserves the request method on follow-up."""

    def __init__(
        self,
        url: str | URL,
        status_code: int = 307,
        headers: typing.Mapping[str, str] | None = None,
        background: BackgroundTask | None = None,
    ) -> None:
        super().__init__(content=b"", status_code=status_code, headers=headers, background=background)
        # Percent-encode the target while leaving characters that are legal in
        # a URL (delimiters, query/fragment markers, etc.) untouched.
        self.headers["location"] = quote(str(url), safe=":/%#?=@[]!$&'()*+,;")
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
# Chunk types accepted by streaming responses: text (encoded with the response
# charset at send time) or ready-made bytes/memoryview chunks. A stream may be
# either a synchronous or an asynchronous iterable of such chunks.
Content = typing.Union[str, bytes, memoryview]
SyncContentStream = typing.Iterable[Content]
AsyncContentStream = typing.AsyncIterable[Content]
ContentStream = typing.Union[AsyncContentStream, SyncContentStream]
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
class StreamingResponse(Response):
    """Response that sends its body incrementally from a (sync or async) iterable,
    aborting the stream early if the client disconnects."""

    body_iterator: AsyncContentStream

    def __init__(
        self,
        content: ContentStream,
        status_code: int = 200,
        headers: typing.Mapping[str, str] | None = None,
        media_type: str | None = None,
        background: BackgroundTask | None = None,
    ) -> None:
        if isinstance(content, typing.AsyncIterable):
            self.body_iterator = content
        else:
            # Sync iterables are consumed in a worker thread so they don't
            # block the event loop.
            self.body_iterator = iterate_in_threadpool(content)
        self.status_code = status_code
        self.media_type = self.media_type if media_type is None else media_type
        self.background = background
        # Note: no body is rendered here, so the base class will not set a
        # content-length header for us.
        self.init_headers(headers)

    async def listen_for_disconnect(self, receive: Receive) -> None:
        """Consume receive events until the client disconnects."""
        while True:
            message = await receive()
            if message["type"] == "http.disconnect":
                break

    async def stream_response(self, send: Send) -> None:
        """Send response start, then each chunk with more_body=True, then a
        closing empty-body message."""
        await send(
            {
                "type": "http.response.start",
                "status": self.status_code,
                "headers": self.raw_headers,
            }
        )
        async for chunk in self.body_iterator:
            # str chunks are encoded with the response charset on the fly.
            if not isinstance(chunk, (bytes, memoryview)):
                chunk = chunk.encode(self.charset)
            await send({"type": "http.response.body", "body": chunk, "more_body": True})

        await send({"type": "http.response.body", "body": b"", "more_body": False})

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        """Race the body stream against a client disconnect: whichever task
        finishes first cancels the task group, stopping the other."""
        async with anyio.create_task_group() as task_group:

            async def wrap(func: typing.Callable[[], typing.Awaitable[None]]) -> None:
                await func()
                # Cancelling the scope tears down the sibling task as well.
                task_group.cancel_scope.cancel()

            task_group.start_soon(wrap, partial(self.stream_response, send))
            await wrap(partial(self.listen_for_disconnect, receive))

        if self.background is not None:
            await self.background()
|
| 263 |
+
|
| 264 |
+
|
| 265 |
+
class MalformedRangeHeader(Exception):
    """Raised when a Range header cannot be parsed; ``content`` is the
    client-facing message (surfaced as a 400 response by FileResponse)."""

    def __init__(self, content: str = "Malformed range header.") -> None:
        self.content = content
|
| 268 |
+
|
| 269 |
+
|
| 270 |
+
class RangeNotSatisfiable(Exception):
    """Raised when a requested range falls outside the file; ``max_size`` is
    the file size reported back in the 416 response's Content-Range header."""

    def __init__(self, max_size: int) -> None:
        self.max_size = max_size
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
# Matches a single "start-end" byte-range spec; either side may be empty
# (e.g. "500-", "-500"), which the parser interprets accordingly.
_RANGE_PATTERN = re.compile(r"(\d*)-(\d*)")
|
| 276 |
+
|
| 277 |
+
|
| 278 |
+
class FileResponse(Response):
    """Response that streams a file from disk.

    Supports HEAD requests, conditional ranges (If-Range) and single or
    multiple byte ranges per RFC 7233, emitting a 206 with either a plain
    partial body or a multipart/byteranges body.
    """

    chunk_size = 64 * 1024

    def __init__(
        self,
        path: str | os.PathLike[str],
        status_code: int = 200,
        headers: typing.Mapping[str, str] | None = None,
        media_type: str | None = None,
        background: BackgroundTask | None = None,
        filename: str | None = None,
        stat_result: os.stat_result | None = None,
        method: str | None = None,
        content_disposition_type: str = "attachment",
    ) -> None:
        self.path = path
        self.status_code = status_code
        self.filename = filename
        if method is not None:
            # Retained only for backwards compatibility; HEAD handling is now
            # derived from the request scope.
            warnings.warn(
                "The 'method' parameter is not used, and it will be removed.",
                DeprecationWarning,
            )
        if media_type is None:
            media_type = guess_type(filename or path)[0] or "text/plain"
        self.media_type = media_type
        self.background = background
        self.init_headers(headers)
        self.headers.setdefault("accept-ranges", "bytes")
        if self.filename is not None:
            content_disposition_filename = quote(self.filename)
            if content_disposition_filename != self.filename:
                # Non-ASCII filename: use the RFC 5987 filename* extended form.
                content_disposition = f"{content_disposition_type}; filename*=utf-8''{content_disposition_filename}"
            else:
                content_disposition = f'{content_disposition_type}; filename="{self.filename}"'
            self.headers.setdefault("content-disposition", content_disposition)
        self.stat_result = stat_result
        if stat_result is not None:
            self.set_stat_headers(stat_result)

    def set_stat_headers(self, stat_result: os.stat_result) -> None:
        """Derive content-length, last-modified and a weak-ish ETag from the
        file's stat result (mtime + size hashed with a non-security MD5)."""
        content_length = str(stat_result.st_size)
        last_modified = formatdate(stat_result.st_mtime, usegmt=True)
        etag_base = str(stat_result.st_mtime) + "-" + str(stat_result.st_size)
        etag = f'"{md5_hexdigest(etag_base.encode(), usedforsecurity=False)}"'

        self.headers.setdefault("content-length", content_length)
        self.headers.setdefault("last-modified", last_modified)
        self.headers.setdefault("etag", etag)

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        """ASGI entry point: stat the file if needed, then dispatch to the
        simple / single-range / multi-range sender."""
        send_header_only: bool = scope["method"].upper() == "HEAD"
        if self.stat_result is None:
            try:
                # Stat in a thread to avoid blocking the event loop.
                stat_result = await anyio.to_thread.run_sync(os.stat, self.path)
                self.set_stat_headers(stat_result)
            except FileNotFoundError:
                raise RuntimeError(f"File at path {self.path} does not exist.")
            else:
                mode = stat_result.st_mode
                if not stat.S_ISREG(mode):
                    raise RuntimeError(f"File at path {self.path} is not a file.")
        else:
            stat_result = self.stat_result

        headers = Headers(scope=scope)
        http_range = headers.get("range")
        http_if_range = headers.get("if-range")

        # No Range header, or a stale If-Range validator: send the whole file.
        if http_range is None or (http_if_range is not None and not self._should_use_range(http_if_range, stat_result)):
            await self._handle_simple(send, send_header_only)
        else:
            try:
                ranges = self._parse_range_header(http_range, stat_result.st_size)
            except MalformedRangeHeader as exc:
                return await PlainTextResponse(exc.content, status_code=400)(scope, receive, send)
            except RangeNotSatisfiable as exc:
                response = PlainTextResponse(status_code=416, headers={"Content-Range": f"*/{exc.max_size}"})
                return await response(scope, receive, send)

            if len(ranges) == 1:
                start, end = ranges[0]
                await self._handle_single_range(send, start, end, stat_result.st_size, send_header_only)
            else:
                await self._handle_multiple_ranges(send, ranges, stat_result.st_size, send_header_only)

        if self.background is not None:
            await self.background()

    async def _handle_simple(self, send: Send, send_header_only: bool) -> None:
        """Send the full file body (or just headers for HEAD) in chunks."""
        await send({"type": "http.response.start", "status": self.status_code, "headers": self.raw_headers})
        if send_header_only:
            await send({"type": "http.response.body", "body": b"", "more_body": False})
        else:
            async with await anyio.open_file(self.path, mode="rb") as file:
                more_body = True
                while more_body:
                    chunk = await file.read(self.chunk_size)
                    # A short read means EOF was reached.
                    more_body = len(chunk) == self.chunk_size
                    await send({"type": "http.response.body", "body": chunk, "more_body": more_body})

    async def _handle_single_range(
        self, send: Send, start: int, end: int, file_size: int, send_header_only: bool
    ) -> None:
        """Send a 206 with one contiguous byte range [start, end)."""
        # ``end`` is exclusive internally; Content-Range is inclusive.
        self.headers["content-range"] = f"bytes {start}-{end - 1}/{file_size}"
        self.headers["content-length"] = str(end - start)
        await send({"type": "http.response.start", "status": 206, "headers": self.raw_headers})
        if send_header_only:
            await send({"type": "http.response.body", "body": b"", "more_body": False})
        else:
            async with await anyio.open_file(self.path, mode="rb") as file:
                await file.seek(start)
                more_body = True
                while more_body:
                    chunk = await file.read(min(self.chunk_size, end - start))
                    start += len(chunk)
                    more_body = len(chunk) == self.chunk_size and start < end
                    await send({"type": "http.response.body", "body": chunk, "more_body": more_body})

    async def _handle_multiple_ranges(
        self,
        send: Send,
        ranges: list[tuple[int, int]],
        file_size: int,
        send_header_only: bool,
    ) -> None:
        """Send a 206 multipart/byteranges body with one part per range."""
        # In firefox and chrome, they use boundary with 95-96 bits entropy (that's roughly 13 bytes).
        boundary = token_hex(13)
        content_length, header_generator = self.generate_multipart(
            ranges, boundary, file_size, self.headers["content-type"]
        )
        # BUG FIX: the boundary of a multipart range response is advertised via
        # the top-level Content-Type header (RFC 7233 §4.1); this previously
        # wrote it into Content-Range, which is invalid at the top level.
        self.headers["content-type"] = f"multipart/byteranges; boundary={boundary}"
        self.headers["content-length"] = str(content_length)
        await send({"type": "http.response.start", "status": 206, "headers": self.raw_headers})
        if send_header_only:
            await send({"type": "http.response.body", "body": b"", "more_body": False})
        else:
            async with await anyio.open_file(self.path, mode="rb") as file:
                for start, end in ranges:
                    # Per-part headers (boundary, part Content-Type/Content-Range).
                    await send({"type": "http.response.body", "body": header_generator(start, end), "more_body": True})
                    await file.seek(start)
                    while start < end:
                        chunk = await file.read(min(self.chunk_size, end - start))
                        start += len(chunk)
                        await send({"type": "http.response.body", "body": chunk, "more_body": True})
                    await send({"type": "http.response.body", "body": b"\n", "more_body": True})
                await send(
                    {
                        "type": "http.response.body",
                        "body": f"\n--{boundary}--\n".encode("latin-1"),
                        "more_body": False,
                    }
                )

    @classmethod
    def _should_use_range(cls, http_if_range: str, stat_result: os.stat_result) -> bool:
        """Return True when the If-Range validator matches the file's current
        last-modified date or ETag (i.e. the cached representation is fresh)."""
        etag_base = str(stat_result.st_mtime) + "-" + str(stat_result.st_size)
        etag = f'"{md5_hexdigest(etag_base.encode(), usedforsecurity=False)}"'
        return http_if_range == formatdate(stat_result.st_mtime, usegmt=True) or http_if_range == etag

    @staticmethod
    def _parse_range_header(http_range: str, file_size: int) -> list[tuple[int, int]]:
        """Parse a ``bytes=...`` Range header into merged [start, end) tuples.

        Raises MalformedRangeHeader for syntactic problems and
        RangeNotSatisfiable when a start offset lies beyond the file.
        """
        ranges: list[tuple[int, int]] = []
        try:
            units, range_ = http_range.split("=", 1)
        except ValueError:
            raise MalformedRangeHeader()

        units = units.strip().lower()

        if units != "bytes":
            raise MalformedRangeHeader("Only support bytes range")

        ranges = [
            (
                # "-N" means the final N bytes; "M-" means from M to EOF.
                int(_[0]) if _[0] else file_size - int(_[1]),
                int(_[1]) + 1 if _[0] and _[1] and int(_[1]) < file_size else file_size,
            )
            for _ in _RANGE_PATTERN.findall(range_)
            if _ != ("", "")
        ]

        if len(ranges) == 0:
            raise MalformedRangeHeader("Range header: range must be requested")

        if any(not (0 <= start < file_size) for start, _ in ranges):
            raise RangeNotSatisfiable(file_size)

        if any(start > end for start, end in ranges):
            raise MalformedRangeHeader("Range header: start must be less than end")

        if len(ranges) == 1:
            return ranges

        # Merge overlapping/adjacent ranges while keeping the result sorted.
        result: list[tuple[int, int]] = []
        for start, end in ranges:
            for p in range(len(result)):
                p_start, p_end = result[p]
                if start > p_end:
                    continue
                elif end < p_start:
                    # New range lies entirely before this merged range.
                    result.insert(p, (start, end))
                    break
                else:
                    # Overlap: widen the existing merged range.
                    result[p] = (min(start, p_start), max(end, p_end))
                    break
            else:
                result.append((start, end))

        return result

    def generate_multipart(
        self,
        ranges: typing.Sequence[tuple[int, int]],
        boundary: str,
        max_size: int,
        content_type: str,
    ) -> tuple[int, typing.Callable[[int, int], bytes]]:
        r"""
        Multipart response headers generator.

        ```
        --{boundary}\n
        Content-Type: {content_type}\n
        Content-Range: bytes {start}-{end-1}/{max_size}\n
        \n
        ..........content...........\n
        --{boundary}\n
        Content-Type: {content_type}\n
        Content-Range: bytes {start}-{end-1}/{max_size}\n
        \n
        ..........content...........\n
        --{boundary}--\n
        ```

        Returns the total body length (so Content-Length can be set up front)
        and a callable producing the per-part header bytes for a given range.
        """
        boundary_len = len(boundary)
        # 44 = fixed characters of the per-part header block plus the part's
        # trailing newline; only the variable start/end digits are added per range.
        static_header_part_len = 44 + boundary_len + len(content_type) + len(str(max_size))
        content_length = sum(
            (len(str(start)) + len(str(end - 1)) + static_header_part_len)  # Headers
            + (end - start)  # Content
            for start, end in ranges
        ) + (
            5 + boundary_len  # --boundary--\n
        )
        return (
            content_length,
            lambda start, end: (
                f"--{boundary}\n"
                f"Content-Type: {content_type}\n"
                f"Content-Range: bytes {start}-{end-1}/{max_size}\n"
                "\n"
            ).encode("latin-1"),
        )
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/schemas.py
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import inspect
|
| 4 |
+
import re
|
| 5 |
+
import typing
|
| 6 |
+
|
| 7 |
+
from starlette.requests import Request
|
| 8 |
+
from starlette.responses import Response
|
| 9 |
+
from starlette.routing import BaseRoute, Host, Mount, Route
|
| 10 |
+
|
| 11 |
+
try:
|
| 12 |
+
import yaml
|
| 13 |
+
except ModuleNotFoundError: # pragma: no cover
|
| 14 |
+
yaml = None # type: ignore[assignment]
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class OpenAPIResponse(Response):
    """Response rendering an OpenAPI schema dict as YAML with the
    ``application/vnd.oai.openapi`` media type."""

    media_type = "application/vnd.oai.openapi"

    def render(self, content: typing.Any) -> bytes:
        # Requires the optional pyyaml dependency and a dict schema.
        assert yaml is not None, "`pyyaml` must be installed to use OpenAPIResponse."
        assert isinstance(content, dict), "The schema passed to OpenAPIResponse should be a dictionary."
        return yaml.dump(content, default_flow_style=False).encode("utf-8")
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class EndpointInfo(typing.NamedTuple):
    """Flattened description of one routable endpoint used for schema generation."""

    path: str  # full URL path with converter suffixes removed
    http_method: str  # lowercase HTTP method
    func: typing.Callable[..., typing.Any]  # handler whose docstring holds the schema YAML
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
# Matches a path-parameter converter suffix such as ":int}" so that
# "/users/{id:int}" can be rewritten to "/users/{id}".
_remove_converter_pattern = re.compile(r":\w+}")
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class BaseSchemaGenerator:
    """Base class for OpenAPI schema generators: walks the route table and
    extracts endpoint/docstring information; subclasses implement get_schema()."""

    def get_schema(self, routes: list[BaseRoute]) -> dict[str, typing.Any]:
        raise NotImplementedError()  # pragma: no cover

    def get_endpoints(self, routes: list[BaseRoute]) -> list[EndpointInfo]:
        """
        Given the routes, yields the following information:

        - path
            eg: /users/
        - http_method
            one of 'get', 'post', 'put', 'patch', 'delete', 'options'
        - func
            method ready to extract the docstring
        """
        endpoints_info: list[EndpointInfo] = []

        for route in routes:
            if isinstance(route, (Mount, Host)):
                # NOTE(review): rebinding ``routes`` shadows the parameter; the
                # outer for-loop is unaffected because its iterator is already
                # bound, but the name now refers to the sub-routes.
                routes = route.routes or []
                if isinstance(route, Mount):
                    path = self._remove_converter(route.path)
                else:
                    path = ""
                # Recurse into the mounted/hosted sub-application, prefixing
                # each sub-endpoint with the mount path.
                sub_endpoints = [
                    EndpointInfo(
                        path="".join((path, sub_endpoint.path)),
                        http_method=sub_endpoint.http_method,
                        func=sub_endpoint.func,
                    )
                    for sub_endpoint in self.get_endpoints(routes)
                ]
                endpoints_info.extend(sub_endpoints)

            elif not isinstance(route, Route) or not route.include_in_schema:
                continue

            elif inspect.isfunction(route.endpoint) or inspect.ismethod(route.endpoint):
                # Function-based endpoint: one EndpointInfo per declared method,
                # skipping HEAD (implied by GET).
                path = self._remove_converter(route.path)
                for method in route.methods or ["GET"]:
                    if method == "HEAD":
                        continue
                    endpoints_info.append(EndpointInfo(path, method.lower(), route.endpoint))
            else:
                # Class-based endpoint: inspect which HTTP-method handlers exist.
                path = self._remove_converter(route.path)
                for method in ["get", "post", "put", "patch", "delete", "options"]:
                    if not hasattr(route.endpoint, method):
                        continue
                    func = getattr(route.endpoint, method)
                    endpoints_info.append(EndpointInfo(path, method.lower(), func))

        return endpoints_info

    def _remove_converter(self, path: str) -> str:
        """
        Remove the converter from the path.
        For example, a route like this:
            Route("/users/{id:int}", endpoint=get_user, methods=["GET"])
        Should be represented as `/users/{id}` in the OpenAPI schema.
        """
        return _remove_converter_pattern.sub("}", path)

    def parse_docstring(self, func_or_method: typing.Callable[..., typing.Any]) -> dict[str, typing.Any]:
        """
        Given a function, parse the docstring as YAML and return a dictionary of info.
        """
        docstring = func_or_method.__doc__
        if not docstring:
            return {}

        assert yaml is not None, "`pyyaml` must be installed to use parse_docstring."

        # We support having regular docstrings before the schema
        # definition. Here we return just the schema part from
        # the docstring.
        docstring = docstring.split("---")[-1]

        parsed = yaml.safe_load(docstring)

        if not isinstance(parsed, dict):
            # A regular docstring (not yaml formatted) can return
            # a simple string here, which wouldn't follow the schema.
            return {}

        return parsed

    def OpenAPIResponse(self, request: Request) -> Response:
        """Build an OpenAPIResponse for the requesting app's full route table."""
        routes = request.app.routes
        schema = self.get_schema(routes=routes)
        return OpenAPIResponse(schema)
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
class SchemaGenerator(BaseSchemaGenerator):
    """Schema generator that merges per-endpoint docstring YAML into a base schema."""

    def __init__(self, base_schema: dict[str, typing.Any]) -> None:
        self.base_schema = base_schema

    def get_schema(self, routes: list[BaseRoute]) -> dict[str, typing.Any]:
        """Return a copy of the base schema with one ``paths`` entry per
        endpoint whose docstring parses to a YAML mapping."""
        schema = dict(self.base_schema)
        schema.setdefault("paths", {})

        for endpoint in self.get_endpoints(routes):
            parsed = self.parse_docstring(endpoint.func)
            if parsed:
                # Endpoints without a schema docstring are simply omitted.
                path_item = schema["paths"].setdefault(endpoint.path, {})
                path_item[endpoint.http_method] = parsed

        return schema
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/staticfiles.py
ADDED
|
@@ -0,0 +1,220 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import errno
|
| 4 |
+
import importlib.util
|
| 5 |
+
import os
|
| 6 |
+
import stat
|
| 7 |
+
import typing
|
| 8 |
+
from email.utils import parsedate
|
| 9 |
+
|
| 10 |
+
import anyio
|
| 11 |
+
import anyio.to_thread
|
| 12 |
+
|
| 13 |
+
from starlette._utils import get_route_path
|
| 14 |
+
from starlette.datastructures import URL, Headers
|
| 15 |
+
from starlette.exceptions import HTTPException
|
| 16 |
+
from starlette.responses import FileResponse, RedirectResponse, Response
|
| 17 |
+
from starlette.types import Receive, Scope, Send
|
| 18 |
+
|
| 19 |
+
# Filesystem path argument: a plain string or any os.PathLike of str.
PathLike = typing.Union[str, "os.PathLike[str]"]
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class NotModifiedResponse(Response):
    """Empty 304 response that carries over only the headers a 304 is allowed
    to include from the original response."""

    NOT_MODIFIED_HEADERS = (
        "cache-control",
        "content-location",
        "date",
        "etag",
        "expires",
        "vary",
    )

    def __init__(self, headers: Headers):
        super().__init__(
            status_code=304,
            headers={name: value for name, value in headers.items() if name in self.NOT_MODIFIED_HEADERS},
        )
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class StaticFiles:
|
| 40 |
+
    def __init__(
        self,
        *,
        directory: PathLike | None = None,
        packages: list[str | tuple[str, str]] | None = None,
        html: bool = False,
        check_dir: bool = True,
        follow_symlink: bool = False,
    ) -> None:
        """Configure the static-file app.

        ``directory`` and/or ``packages`` supply the roots searched for files;
        ``html`` enables index.html/404.html handling; ``check_dir`` validates
        the directory eagerly; ``follow_symlink`` permits serving through links.
        """
        self.directory = directory
        self.packages = packages
        self.all_directories = self.get_directories(directory, packages)
        self.html = html
        # Full config check is deferred to the first request (see __call__).
        self.config_checked = False
        self.follow_symlink = follow_symlink
        if check_dir and directory is not None and not os.path.isdir(directory):
            raise RuntimeError(f"Directory '{directory}' does not exist")
|
| 57 |
+
|
| 58 |
+
    def get_directories(
        self,
        directory: PathLike | None = None,
        packages: list[str | tuple[str, str]] | None = None,
    ) -> list[PathLike]:
        """
        Given `directory` and `packages` arguments, return a list of all the
        directories that should be used for serving static files from.
        """
        directories = []
        if directory is not None:
            directories.append(directory)

        for package in packages or []:
            # A package entry is either "pkg" (statics dir defaults to
            # "statics") or ("pkg", "custom_dir").
            if isinstance(package, tuple):
                package, statics_dir = package
            else:
                statics_dir = "statics"
            spec = importlib.util.find_spec(package)
            assert spec is not None, f"Package {package!r} could not be found."
            assert spec.origin is not None, f"Package {package!r} could not be found."
            # Resolve the statics dir relative to the package's __init__ file.
            package_directory = os.path.normpath(os.path.join(spec.origin, "..", statics_dir))
            assert os.path.isdir(
                package_directory
            ), f"Directory '{statics_dir!r}' in package {package!r} could not be found."
            directories.append(package_directory)

        return directories
|
| 86 |
+
|
| 87 |
+
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
|
| 88 |
+
"""
|
| 89 |
+
The ASGI entry point.
|
| 90 |
+
"""
|
| 91 |
+
assert scope["type"] == "http"
|
| 92 |
+
|
| 93 |
+
if not self.config_checked:
|
| 94 |
+
await self.check_config()
|
| 95 |
+
self.config_checked = True
|
| 96 |
+
|
| 97 |
+
path = self.get_path(scope)
|
| 98 |
+
response = await self.get_response(path, scope)
|
| 99 |
+
await response(scope, receive, send)
|
| 100 |
+
|
| 101 |
+
def get_path(self, scope: Scope) -> str:
|
| 102 |
+
"""
|
| 103 |
+
Given the ASGI scope, return the `path` string to serve up,
|
| 104 |
+
with OS specific path separators, and any '..', '.' components removed.
|
| 105 |
+
"""
|
| 106 |
+
route_path = get_route_path(scope)
|
| 107 |
+
return os.path.normpath(os.path.join(*route_path.split("/")))
|
| 108 |
+
|
| 109 |
+
async def get_response(self, path: str, scope: Scope) -> Response:
|
| 110 |
+
"""
|
| 111 |
+
Returns an HTTP response, given the incoming path, method and request headers.
|
| 112 |
+
"""
|
| 113 |
+
if scope["method"] not in ("GET", "HEAD"):
|
| 114 |
+
raise HTTPException(status_code=405)
|
| 115 |
+
|
| 116 |
+
try:
|
| 117 |
+
full_path, stat_result = await anyio.to_thread.run_sync(self.lookup_path, path)
|
| 118 |
+
except PermissionError:
|
| 119 |
+
raise HTTPException(status_code=401)
|
| 120 |
+
except OSError as exc:
|
| 121 |
+
# Filename is too long, so it can't be a valid static file.
|
| 122 |
+
if exc.errno == errno.ENAMETOOLONG:
|
| 123 |
+
raise HTTPException(status_code=404)
|
| 124 |
+
|
| 125 |
+
raise exc
|
| 126 |
+
|
| 127 |
+
if stat_result and stat.S_ISREG(stat_result.st_mode):
|
| 128 |
+
# We have a static file to serve.
|
| 129 |
+
return self.file_response(full_path, stat_result, scope)
|
| 130 |
+
|
| 131 |
+
elif stat_result and stat.S_ISDIR(stat_result.st_mode) and self.html:
|
| 132 |
+
# We're in HTML mode, and have got a directory URL.
|
| 133 |
+
# Check if we have 'index.html' file to serve.
|
| 134 |
+
index_path = os.path.join(path, "index.html")
|
| 135 |
+
full_path, stat_result = await anyio.to_thread.run_sync(self.lookup_path, index_path)
|
| 136 |
+
if stat_result is not None and stat.S_ISREG(stat_result.st_mode):
|
| 137 |
+
if not scope["path"].endswith("/"):
|
| 138 |
+
# Directory URLs should redirect to always end in "/".
|
| 139 |
+
url = URL(scope=scope)
|
| 140 |
+
url = url.replace(path=url.path + "/")
|
| 141 |
+
return RedirectResponse(url=url)
|
| 142 |
+
return self.file_response(full_path, stat_result, scope)
|
| 143 |
+
|
| 144 |
+
if self.html:
|
| 145 |
+
# Check for '404.html' if we're in HTML mode.
|
| 146 |
+
full_path, stat_result = await anyio.to_thread.run_sync(self.lookup_path, "404.html")
|
| 147 |
+
if stat_result and stat.S_ISREG(stat_result.st_mode):
|
| 148 |
+
return FileResponse(full_path, stat_result=stat_result, status_code=404)
|
| 149 |
+
raise HTTPException(status_code=404)
|
| 150 |
+
|
| 151 |
+
def lookup_path(self, path: str) -> tuple[str, os.stat_result | None]:
|
| 152 |
+
for directory in self.all_directories:
|
| 153 |
+
joined_path = os.path.join(directory, path)
|
| 154 |
+
if self.follow_symlink:
|
| 155 |
+
full_path = os.path.abspath(joined_path)
|
| 156 |
+
else:
|
| 157 |
+
full_path = os.path.realpath(joined_path)
|
| 158 |
+
directory = os.path.realpath(directory)
|
| 159 |
+
if os.path.commonpath([full_path, directory]) != directory:
|
| 160 |
+
# Don't allow misbehaving clients to break out of the static files
|
| 161 |
+
# directory.
|
| 162 |
+
continue
|
| 163 |
+
try:
|
| 164 |
+
return full_path, os.stat(full_path)
|
| 165 |
+
except (FileNotFoundError, NotADirectoryError):
|
| 166 |
+
continue
|
| 167 |
+
return "", None
|
| 168 |
+
|
| 169 |
+
def file_response(
|
| 170 |
+
self,
|
| 171 |
+
full_path: PathLike,
|
| 172 |
+
stat_result: os.stat_result,
|
| 173 |
+
scope: Scope,
|
| 174 |
+
status_code: int = 200,
|
| 175 |
+
) -> Response:
|
| 176 |
+
request_headers = Headers(scope=scope)
|
| 177 |
+
|
| 178 |
+
response = FileResponse(full_path, status_code=status_code, stat_result=stat_result)
|
| 179 |
+
if self.is_not_modified(response.headers, request_headers):
|
| 180 |
+
return NotModifiedResponse(response.headers)
|
| 181 |
+
return response
|
| 182 |
+
|
| 183 |
+
async def check_config(self) -> None:
|
| 184 |
+
"""
|
| 185 |
+
Perform a one-off configuration check that StaticFiles is actually
|
| 186 |
+
pointed at a directory, so that we can raise loud errors rather than
|
| 187 |
+
just returning 404 responses.
|
| 188 |
+
"""
|
| 189 |
+
if self.directory is None:
|
| 190 |
+
return
|
| 191 |
+
|
| 192 |
+
try:
|
| 193 |
+
stat_result = await anyio.to_thread.run_sync(os.stat, self.directory)
|
| 194 |
+
except FileNotFoundError:
|
| 195 |
+
raise RuntimeError(f"StaticFiles directory '{self.directory}' does not exist.")
|
| 196 |
+
if not (stat.S_ISDIR(stat_result.st_mode) or stat.S_ISLNK(stat_result.st_mode)):
|
| 197 |
+
raise RuntimeError(f"StaticFiles path '{self.directory}' is not a directory.")
|
| 198 |
+
|
| 199 |
+
def is_not_modified(self, response_headers: Headers, request_headers: Headers) -> bool:
|
| 200 |
+
"""
|
| 201 |
+
Given the request and response headers, return `True` if an HTTP
|
| 202 |
+
"Not Modified" response could be returned instead.
|
| 203 |
+
"""
|
| 204 |
+
try:
|
| 205 |
+
if_none_match = request_headers["if-none-match"]
|
| 206 |
+
etag = response_headers["etag"]
|
| 207 |
+
if etag in [tag.strip(" W/") for tag in if_none_match.split(",")]:
|
| 208 |
+
return True
|
| 209 |
+
except KeyError:
|
| 210 |
+
pass
|
| 211 |
+
|
| 212 |
+
try:
|
| 213 |
+
if_modified_since = parsedate(request_headers["if-modified-since"])
|
| 214 |
+
last_modified = parsedate(response_headers["last-modified"])
|
| 215 |
+
if if_modified_since is not None and last_modified is not None and if_modified_since >= last_modified:
|
| 216 |
+
return True
|
| 217 |
+
except KeyError:
|
| 218 |
+
pass
|
| 219 |
+
|
| 220 |
+
return False
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/templating.py
ADDED
|
@@ -0,0 +1,216 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import typing
|
| 4 |
+
import warnings
|
| 5 |
+
from os import PathLike
|
| 6 |
+
|
| 7 |
+
from starlette.background import BackgroundTask
|
| 8 |
+
from starlette.datastructures import URL
|
| 9 |
+
from starlette.requests import Request
|
| 10 |
+
from starlette.responses import HTMLResponse
|
| 11 |
+
from starlette.types import Receive, Scope, Send
|
| 12 |
+
|
| 13 |
+
try:
|
| 14 |
+
import jinja2
|
| 15 |
+
|
| 16 |
+
# @contextfunction was renamed to @pass_context in Jinja 3.0, and was removed in 3.1
|
| 17 |
+
# hence we try to get pass_context (most installs will be >=3.1)
|
| 18 |
+
# and fall back to contextfunction,
|
| 19 |
+
# adding a type ignore for mypy to let us access an attribute that may not exist
|
| 20 |
+
if hasattr(jinja2, "pass_context"):
|
| 21 |
+
pass_context = jinja2.pass_context
|
| 22 |
+
else: # pragma: no cover
|
| 23 |
+
pass_context = jinja2.contextfunction # type: ignore[attr-defined]
|
| 24 |
+
except ModuleNotFoundError: # pragma: no cover
|
| 25 |
+
jinja2 = None # type: ignore[assignment]
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class _TemplateResponse(HTMLResponse):
    """HTML response produced by rendering a Jinja2 template with a context."""

    def __init__(
        self,
        template: typing.Any,
        context: dict[str, typing.Any],
        status_code: int = 200,
        headers: typing.Mapping[str, str] | None = None,
        media_type: str | None = None,
        background: BackgroundTask | None = None,
    ):
        # Keep template and context around so debug tooling can inspect them.
        self.template = template
        self.context = context
        rendered = template.render(context)
        super().__init__(rendered, status_code, headers, media_type, background)

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        # When the caller (e.g. a test client) advertises the
        # "http.response.debug" extension, emit the template and context
        # ahead of the regular response messages.
        request = self.context.get("request", {})
        debug_supported = "http.response.debug" in request.get("extensions", {})
        if debug_supported:
            debug_info = {"template": self.template, "context": self.context}
            await send({"type": "http.response.debug", "info": debug_info})
        await super().__call__(scope, receive, send)
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class Jinja2Templates:
    """
    templates = Jinja2Templates("templates")

    return templates.TemplateResponse("index.html", {"request": request})
    """

    @typing.overload
    def __init__(
        self,
        directory: str | PathLike[str] | typing.Sequence[str | PathLike[str]],
        *,
        context_processors: list[typing.Callable[[Request], dict[str, typing.Any]]] | None = None,
        **env_options: typing.Any,
    ) -> None: ...

    @typing.overload
    def __init__(
        self,
        *,
        env: jinja2.Environment,
        context_processors: list[typing.Callable[[Request], dict[str, typing.Any]]] | None = None,
    ) -> None: ...

    def __init__(
        self,
        directory: str | PathLike[str] | typing.Sequence[str | PathLike[str]] | None = None,
        *,
        context_processors: list[typing.Callable[[Request], dict[str, typing.Any]]] | None = None,
        env: jinja2.Environment | None = None,
        **env_options: typing.Any,
    ) -> None:
        """Create a template renderer from either a directory or an Environment.

        Exactly one of `directory` / `env` must be provided. Extra keyword
        options are forwarded to `jinja2.Environment` but are deprecated.
        """
        if env_options:
            warnings.warn(
                "Extra environment options are deprecated. Use a preconfigured jinja2.Environment instead.",
                DeprecationWarning,
            )
        assert jinja2 is not None, "jinja2 must be installed to use Jinja2Templates"
        assert bool(directory) ^ bool(env), "either 'directory' or 'env' arguments must be passed"
        self.context_processors = context_processors or []
        if directory is not None:
            self.env = self._create_env(directory, **env_options)
        elif env is not None:
            self.env = env

        self._setup_env_defaults(self.env)

    def _create_env(
        self,
        directory: str | PathLike[str] | typing.Sequence[str | PathLike[str]],
        **env_options: typing.Any,
    ) -> jinja2.Environment:
        # Build an autoescaping environment rooted at `directory`; caller
        # options may override the loader/autoescape defaults.
        loader = jinja2.FileSystemLoader(directory)
        env_options.setdefault("loader", loader)
        env_options.setdefault("autoescape", True)

        return jinja2.Environment(**env_options)

    def _setup_env_defaults(self, env: jinja2.Environment) -> None:
        # Expose Starlette's URL reversal to templates as `url_for(...)`,
        # unless the environment already defines one.
        @pass_context
        def url_for(
            context: dict[str, typing.Any],
            name: str,
            /,
            **path_params: typing.Any,
        ) -> URL:
            request: Request = context["request"]
            return request.url_for(name, **path_params)

        env.globals.setdefault("url_for", url_for)

    def get_template(self, name: str) -> jinja2.Template:
        """Return the named template from the configured environment."""
        return self.env.get_template(name)

    @typing.overload
    def TemplateResponse(
        self,
        request: Request,
        name: str,
        context: dict[str, typing.Any] | None = None,
        status_code: int = 200,
        headers: typing.Mapping[str, str] | None = None,
        media_type: str | None = None,
        background: BackgroundTask | None = None,
    ) -> _TemplateResponse: ...

    @typing.overload
    def TemplateResponse(
        self,
        name: str,
        context: dict[str, typing.Any] | None = None,
        status_code: int = 200,
        headers: typing.Mapping[str, str] | None = None,
        media_type: str | None = None,
        background: BackgroundTask | None = None,
    ) -> _TemplateResponse:
        # Deprecated usage
        ...

    def TemplateResponse(self, *args: typing.Any, **kwargs: typing.Any) -> _TemplateResponse:
        """Render `name` with `context` and return an HTML response.

        Supports both the modern call style `(request, name, context, ...)`
        and the deprecated `(name, context, ...)` style, which emits a
        DeprecationWarning.
        """
        if args:
            if isinstance(args[0], str):  # the first argument is template name (old style)
                warnings.warn(
                    "The `name` is not the first parameter anymore. "
                    "The first parameter should be the `Request` instance.\n"
                    'Replace `TemplateResponse(name, {"request": request})` by `TemplateResponse(request, name)`.',
                    DeprecationWarning,
                )

                name = args[0]
                context = args[1] if len(args) > 1 else kwargs.get("context", {})
                status_code = args[2] if len(args) > 2 else kwargs.get("status_code", 200)
                # Bug fix: in the deprecated signature
                # (name, context, status_code, headers, media_type, background)
                # headers/media_type/background sit at positions 3/4/5; the
                # original code read them from 2/3/4, so a positional call
                # shifted every argument (headers received status_code, etc.).
                headers = args[3] if len(args) > 3 else kwargs.get("headers")
                media_type = args[4] if len(args) > 4 else kwargs.get("media_type")
                background = args[5] if len(args) > 5 else kwargs.get("background")

                if "request" not in context:
                    raise ValueError('context must include a "request" key')
                request = context["request"]
            else:  # the first argument is a request instance (new style)
                request = args[0]
                name = args[1] if len(args) > 1 else kwargs["name"]
                context = args[2] if len(args) > 2 else kwargs.get("context", {})
                status_code = args[3] if len(args) > 3 else kwargs.get("status_code", 200)
                headers = args[4] if len(args) > 4 else kwargs.get("headers")
                media_type = args[5] if len(args) > 5 else kwargs.get("media_type")
                background = args[6] if len(args) > 6 else kwargs.get("background")
        else:  # all arguments are kwargs
            if "request" not in kwargs:
                warnings.warn(
                    "The `TemplateResponse` now requires the `request` argument.\n"
                    'Replace `TemplateResponse(name, {"context": context})` by `TemplateResponse(request, name)`.',
                    DeprecationWarning,
                )
                if "request" not in kwargs.get("context", {}):
                    raise ValueError('context must include a "request" key')

            context = kwargs.get("context", {})
            request = kwargs.get("request", context.get("request"))
            name = typing.cast(str, kwargs["name"])
            status_code = kwargs.get("status_code", 200)
            headers = kwargs.get("headers")
            media_type = kwargs.get("media_type")
            background = kwargs.get("background")

        # The request is always reachable from the context, and context
        # processors may add further entries derived from it.
        context.setdefault("request", request)
        for context_processor in self.context_processors:
            context.update(context_processor(request))

        template = self.get_template(name)
        return _TemplateResponse(
            template,
            context,
            status_code=status_code,
            headers=headers,
            media_type=media_type,
            background=background,
        )
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/testclient.py
ADDED
|
@@ -0,0 +1,791 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import contextlib
|
| 4 |
+
import inspect
|
| 5 |
+
import io
|
| 6 |
+
import json
|
| 7 |
+
import math
|
| 8 |
+
import queue
|
| 9 |
+
import sys
|
| 10 |
+
import typing
|
| 11 |
+
import warnings
|
| 12 |
+
from concurrent.futures import Future
|
| 13 |
+
from functools import cached_property
|
| 14 |
+
from types import GeneratorType
|
| 15 |
+
from urllib.parse import unquote, urljoin
|
| 16 |
+
|
| 17 |
+
import anyio
|
| 18 |
+
import anyio.abc
|
| 19 |
+
import anyio.from_thread
|
| 20 |
+
from anyio.abc import ObjectReceiveStream, ObjectSendStream
|
| 21 |
+
from anyio.streams.stapled import StapledObjectStream
|
| 22 |
+
|
| 23 |
+
from starlette._utils import is_async_callable
|
| 24 |
+
from starlette.types import ASGIApp, Message, Receive, Scope, Send
|
| 25 |
+
from starlette.websockets import WebSocketDisconnect
|
| 26 |
+
|
| 27 |
+
if sys.version_info >= (3, 10): # pragma: no cover
|
| 28 |
+
from typing import TypeGuard
|
| 29 |
+
else: # pragma: no cover
|
| 30 |
+
from typing_extensions import TypeGuard
|
| 31 |
+
|
| 32 |
+
try:
|
| 33 |
+
import httpx
|
| 34 |
+
except ModuleNotFoundError: # pragma: no cover
|
| 35 |
+
raise RuntimeError(
|
| 36 |
+
"The starlette.testclient module requires the httpx package to be installed.\n"
|
| 37 |
+
"You can install this with:\n"
|
| 38 |
+
" $ pip install httpx\n"
|
| 39 |
+
)
|
| 40 |
+
# Factory returning a context manager that yields an anyio blocking portal —
# the bridge the synchronous test client uses to drive async application code.
_PortalFactoryType = typing.Callable[[], typing.ContextManager[anyio.abc.BlockingPortal]]

# Shapes of the two ASGI application interfaces the client can wrap.
ASGIInstance = typing.Callable[[Receive, Send], typing.Awaitable[None]]  # ASGI2 per-request callable
ASGI2App = typing.Callable[[Scope], ASGIInstance]  # legacy two-phase application
ASGI3App = typing.Callable[[Scope, Receive, Send], typing.Awaitable[None]]  # modern single-callable application


# Mapping shape accepted for form-style request data.
_RequestData = typing.Mapping[str, typing.Union[str, typing.Iterable[str], bytes]]
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def _is_asgi3(app: ASGI2App | ASGI3App) -> TypeGuard[ASGI3App]:
    """Return True when *app* implements the single-callable ASGI3 interface.

    A class counts as ASGI3 when its instances are awaitable; any other
    object counts exactly when it is an async callable.
    """
    if not inspect.isclass(app):
        return is_async_callable(app)
    return hasattr(app, "__await__")
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
class _WrapASGI2:
    """
    Provide an ASGI3 interface onto an ASGI2 app.
    """

    def __init__(self, app: ASGI2App) -> None:
        self.app = app

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        # ASGI2 applications are two-phase: calling with the scope yields a
        # per-request instance, which is then awaited with the channels.
        asgi_instance = self.app(scope)
        await asgi_instance(receive, send)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class _AsyncBackend(typing.TypedDict):
    # Name of the anyio backend to run the app under (e.g. "asyncio" or "trio").
    backend: str
    # Options forwarded to anyio when starting the blocking portal.
    backend_options: dict[str, typing.Any]
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
class _Upgrade(Exception):
    # Internal control-flow exception: raised when a request asks for a
    # WebSocket upgrade, carrying the test session to hand back to the caller.
    def __init__(self, session: WebSocketTestSession) -> None:
        self.session = session
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
# Inherits from both httpx.Response and WebSocketDisconnect so callers can
# treat the denial either as an HTTP response or as a disconnect exception.
class WebSocketDenialResponse(  # type: ignore[misc]
    httpx.Response,
    WebSocketDisconnect,
):
    """
    A special case of `WebSocketDisconnect`, raised in the `TestClient` if the
    `WebSocket` is closed before being accepted with a `send_denial_response()`.
    """
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
class WebSocketTestSession:
    """
    Synchronous driver for a WebSocket connection to an ASGI app under test.

    The app runs in a task started through a blocking portal; messages cross
    the sync/async boundary through two queues: ``_receive_queue`` carries
    client -> app messages, ``_send_queue`` carries app -> client messages
    (and any exception raised by the app, which is re-raised to the caller).
    """

    def __init__(
        self,
        app: ASGI3App,
        scope: Scope,
        portal_factory: _PortalFactoryType,
    ) -> None:
        self.app = app
        self.scope = scope
        self.accepted_subprotocol = None
        self.portal_factory = portal_factory
        self._receive_queue: queue.Queue[Message] = queue.Queue()
        self._send_queue: queue.Queue[Message | BaseException] = queue.Queue()
        self.extra_headers = None

    def __enter__(self) -> WebSocketTestSession:
        # Start the app task, perform the connect handshake, and capture the
        # negotiated subprotocol/headers from the accept message.
        self.exit_stack = contextlib.ExitStack()
        self.portal = self.exit_stack.enter_context(self.portal_factory())

        try:
            _: Future[None] = self.portal.start_task_soon(self._run)
            self.send({"type": "websocket.connect"})
            message = self.receive()
            self._raise_on_close(message)
        except Exception:
            self.exit_stack.close()
            raise
        self.accepted_subprotocol = message.get("subprotocol", None)
        self.extra_headers = message.get("headers", None)
        return self

    @cached_property
    def should_close(self) -> anyio.Event:
        # Created lazily (via cached_property) so the Event is bound to the
        # event loop the portal task actually runs on.
        return anyio.Event()

    async def _notify_close(self) -> None:
        self.should_close.set()

    def __exit__(self, *args: typing.Any) -> None:
        # Close the client side, wake the app task so it can shut down, then
        # drain the send queue, re-raising any exception the app produced.
        try:
            self.close(1000)
        finally:
            self.portal.start_task_soon(self._notify_close)
            self.exit_stack.close()
        while not self._send_queue.empty():
            message = self._send_queue.get()
            if isinstance(message, BaseException):
                raise message

    async def _run(self) -> None:
        """
        The sub-thread in which the websocket session runs.
        """

        async def run_app(tg: anyio.abc.TaskGroup) -> None:
            try:
                await self.app(self.scope, self._asgi_receive, self._asgi_send)
            except anyio.get_cancelled_exc_class():
                ...
            except BaseException as exc:
                # Ferry the app's exception back to the sync caller.
                self._send_queue.put(exc)
                raise
            finally:
                tg.cancel_scope.cancel()

        async with anyio.create_task_group() as tg:
            tg.start_soon(run_app, tg)
            # Wait until __exit__ signals shutdown, then cancel the app task.
            await self.should_close.wait()
            tg.cancel_scope.cancel()

    async def _asgi_receive(self) -> Message:
        # Park on an Event until send() enqueues something; the loop guards
        # against spurious wake-ups.
        while self._receive_queue.empty():
            self._queue_event = anyio.Event()
            await self._queue_event.wait()
        return self._receive_queue.get()

    async def _asgi_send(self, message: Message) -> None:
        self._send_queue.put(message)

    def _raise_on_close(self, message: Message) -> None:
        # Translate an app-initiated close into WebSocketDisconnect, and a
        # denial response (websocket.http.response.*) into
        # WebSocketDenialResponse after collecting the full body.
        if message["type"] == "websocket.close":
            raise WebSocketDisconnect(code=message.get("code", 1000), reason=message.get("reason", ""))
        elif message["type"] == "websocket.http.response.start":
            status_code: int = message["status"]
            headers: list[tuple[bytes, bytes]] = message["headers"]
            body: list[bytes] = []
            while True:
                message = self.receive()
                assert message["type"] == "websocket.http.response.body"
                body.append(message["body"])
                if not message.get("more_body", False):
                    break
            raise WebSocketDenialResponse(status_code=status_code, headers=headers, content=b"".join(body))

    def send(self, message: Message) -> None:
        # Enqueue a client -> app message and wake a parked _asgi_receive().
        self._receive_queue.put(message)
        if hasattr(self, "_queue_event"):
            self.portal.start_task_soon(self._queue_event.set)

    def send_text(self, data: str) -> None:
        self.send({"type": "websocket.receive", "text": data})

    def send_bytes(self, data: bytes) -> None:
        self.send({"type": "websocket.receive", "bytes": data})

    def send_json(self, data: typing.Any, mode: typing.Literal["text", "binary"] = "text") -> None:
        # Serialize compactly; "binary" mode sends the UTF-8 encoding instead
        # of a text frame.
        text = json.dumps(data, separators=(",", ":"), ensure_ascii=False)
        if mode == "text":
            self.send({"type": "websocket.receive", "text": text})
        else:
            self.send({"type": "websocket.receive", "bytes": text.encode("utf-8")})

    def close(self, code: int = 1000, reason: str | None = None) -> None:
        self.send({"type": "websocket.disconnect", "code": code, "reason": reason})

    def receive(self) -> Message:
        # Blocking read of the next app -> client message; exceptions ferried
        # from the app task are re-raised here.
        message = self._send_queue.get()
        if isinstance(message, BaseException):
            raise message
        return message

    def receive_text(self) -> str:
        message = self.receive()
        self._raise_on_close(message)
        return typing.cast(str, message["text"])

    def receive_bytes(self) -> bytes:
        message = self.receive()
        self._raise_on_close(message)
        return typing.cast(bytes, message["bytes"])

    def receive_json(self, mode: typing.Literal["text", "binary"] = "text") -> typing.Any:
        message = self.receive()
        self._raise_on_close(message)
        if mode == "text":
            text = message["text"]
        else:
            text = message["bytes"].decode("utf-8")
        return json.loads(text)
|
| 228 |
+
|
| 229 |
+
|
| 230 |
+
class _TestClientTransport(httpx.BaseTransport):
    """httpx transport that dispatches requests directly into an ASGI app.

    Instead of opening a socket, each request is converted into an ASGI
    ``http`` (or ``websocket``) scope and the application is invoked
    synchronously through a blocking portal.  WebSocket requests do not
    return a response: they raise ``_Upgrade`` carrying a
    ``WebSocketTestSession`` (caught by ``TestClient.websocket_connect``).
    """

    def __init__(
        self,
        app: ASGI3App,
        portal_factory: _PortalFactoryType,
        raise_server_exceptions: bool = True,
        root_path: str = "",
        *,
        app_state: dict[str, typing.Any],
    ) -> None:
        # app_state is shared with TestClient so lifespan "state" is visible
        # in per-request scopes (copied into each scope below).
        self.app = app
        self.raise_server_exceptions = raise_server_exceptions
        self.root_path = root_path
        self.portal_factory = portal_factory
        self.app_state = app_state

    def handle_request(self, request: httpx.Request) -> httpx.Response:
        """Build an ASGI scope from *request*, run the app, and return the response.

        Raises:
            _Upgrade: for ``ws``/``wss`` URLs, carrying the WebSocket test session.
            BaseException: re-raised from the app when ``raise_server_exceptions``.
        """
        scheme = request.url.scheme
        netloc = request.url.netloc.decode(encoding="ascii")
        path = request.url.path
        raw_path = request.url.raw_path
        query = request.url.query.decode(encoding="ascii")

        default_port = {"http": 80, "ws": 80, "https": 443, "wss": 443}[scheme]

        if ":" in netloc:
            host, port_string = netloc.split(":", 1)
            port = int(port_string)
        else:
            host = netloc
            port = default_port

        # Include the 'host' header.
        # An explicit Host header from the caller wins; otherwise synthesize
        # one from the URL, omitting the port when it is the scheme default.
        if "host" in request.headers:
            headers: list[tuple[bytes, bytes]] = []
        elif port == default_port:  # pragma: no cover
            headers = [(b"host", host.encode())]
        else:  # pragma: no cover
            headers = [(b"host", (f"{host}:{port}").encode())]

        # Include other request headers.
        headers += [(key.lower().encode(), value.encode()) for key, value in request.headers.multi_items()]

        scope: dict[str, typing.Any]

        if scheme in {"ws", "wss"}:
            # WebSocket handshake: parse requested subprotocols and hand the
            # scope to a WebSocketTestSession instead of producing a response.
            subprotocol = request.headers.get("sec-websocket-protocol", None)
            if subprotocol is None:
                subprotocols: typing.Sequence[str] = []
            else:
                subprotocols = [value.strip() for value in subprotocol.split(",")]
            scope = {
                "type": "websocket",
                "path": unquote(path),
                "raw_path": raw_path.split(b"?", 1)[0],
                "root_path": self.root_path,
                "scheme": scheme,
                "query_string": query.encode(),
                "headers": headers,
                "client": ["testclient", 50000],
                "server": [host, port],
                "subprotocols": subprotocols,
                "state": self.app_state.copy(),
                "extensions": {"websocket.http.response": {}},
            }
            session = WebSocketTestSession(self.app, scope, self.portal_factory)
            raise _Upgrade(session)

        scope = {
            "type": "http",
            "http_version": "1.1",
            "method": request.method,
            "path": unquote(path),
            "raw_path": raw_path.split(b"?", 1)[0],
            "root_path": self.root_path,
            "scheme": scheme,
            "query_string": query.encode(),
            "headers": headers,
            "client": ["testclient", 50000],
            "server": [host, port],
            "extensions": {"http.response.debug": {}},
            "state": self.app_state.copy(),
        }

        request_complete = False
        response_started = False
        response_complete: anyio.Event
        raw_kwargs: dict[str, typing.Any] = {"stream": io.BytesIO()}
        template = None
        context = None

        async def receive() -> Message:
            # ASGI receive callable: serve the request body once, then block
            # until the response finishes and report a disconnect.
            nonlocal request_complete

            if request_complete:
                if not response_complete.is_set():
                    await response_complete.wait()
                return {"type": "http.disconnect"}

            body = request.read()
            if isinstance(body, str):
                body_bytes: bytes = body.encode("utf-8")  # pragma: no cover
            elif body is None:
                body_bytes = b""  # pragma: no cover
            elif isinstance(body, GeneratorType):
                # Streaming body: emit one chunk per receive() call.
                try:  # pragma: no cover
                    chunk = body.send(None)
                    if isinstance(chunk, str):
                        chunk = chunk.encode("utf-8")
                    return {"type": "http.request", "body": chunk, "more_body": True}
                except StopIteration:  # pragma: no cover
                    request_complete = True
                    return {"type": "http.request", "body": b""}
            else:
                body_bytes = body

            request_complete = True
            return {"type": "http.request", "body": body_bytes}

        async def send(message: Message) -> None:
            # ASGI send callable: accumulate response start/body (and the
            # optional debug template info) into raw_kwargs.
            nonlocal raw_kwargs, response_started, template, context

            if message["type"] == "http.response.start":
                assert not response_started, 'Received multiple "http.response.start" messages.'
                raw_kwargs["status_code"] = message["status"]
                raw_kwargs["headers"] = [(key.decode(), value.decode()) for key, value in message.get("headers", [])]
                response_started = True
            elif message["type"] == "http.response.body":
                assert response_started, 'Received "http.response.body" without "http.response.start".'
                assert not response_complete.is_set(), 'Received "http.response.body" after response completed.'
                body = message.get("body", b"")
                more_body = message.get("more_body", False)
                if request.method != "HEAD":
                    raw_kwargs["stream"].write(body)
                if not more_body:
                    raw_kwargs["stream"].seek(0)
                    response_complete.set()
            elif message["type"] == "http.response.debug":
                template = message["info"]["template"]
                context = message["info"]["context"]

        try:
            with self.portal_factory() as portal:
                # The Event must be created inside the portal's event loop.
                response_complete = portal.call(anyio.Event)
                portal.call(self.app, scope, receive, send)
        except BaseException as exc:
            if self.raise_server_exceptions:
                raise exc

        if self.raise_server_exceptions:
            assert response_started, "TestClient did not receive any response."
        elif not response_started:
            # Swallowed server error: synthesize an empty 500 response.
            raw_kwargs = {
                "status_code": 500,
                "headers": [],
                "stream": io.BytesIO(),
            }

        raw_kwargs["stream"] = httpx.ByteStream(raw_kwargs["stream"].read())

        response = httpx.Response(**raw_kwargs, request=request)
        if template is not None:
            # Attach debug template/context (used by TemplateResponse tests).
            response.template = template  # type: ignore[attr-defined]
            response.context = context  # type: ignore[attr-defined]
        return response
|
| 395 |
+
|
| 396 |
+
|
| 397 |
+
class TestClient(httpx.Client):
    """An httpx.Client that calls an ASGI application in-process.

    Requests are routed through ``_TestClientTransport`` instead of the
    network.  Using the client as a context manager additionally runs the
    application's lifespan protocol (startup on ``__enter__``, shutdown on
    ``__exit__``).  The HTTP verb methods are overridden only to support the
    deprecated ``allow_redirects`` argument alongside ``follow_redirects``.
    """

    # Tell pytest not to collect this class as a test case.
    __test__ = False
    task: Future[None]
    portal: anyio.abc.BlockingPortal | None = None

    def __init__(
        self,
        app: ASGIApp,
        base_url: str = "http://testserver",
        raise_server_exceptions: bool = True,
        root_path: str = "",
        backend: typing.Literal["asyncio", "trio"] = "asyncio",
        backend_options: dict[str, typing.Any] | None = None,
        cookies: httpx._types.CookieTypes | None = None,
        headers: dict[str, str] | None = None,
        follow_redirects: bool = True,
    ) -> None:
        self.async_backend = _AsyncBackend(backend=backend, backend_options=backend_options or {})
        # Accept both ASGI3 (callable) and legacy ASGI2 (factory) apps.
        if _is_asgi3(app):
            asgi_app = app
        else:
            app = typing.cast(ASGI2App, app)  # type: ignore[assignment]
            asgi_app = _WrapASGI2(app)  # type: ignore[arg-type]
        self.app = asgi_app
        # Shared with the transport; populated by the lifespan "state".
        self.app_state: dict[str, typing.Any] = {}
        transport = _TestClientTransport(
            self.app,
            portal_factory=self._portal_factory,
            raise_server_exceptions=raise_server_exceptions,
            root_path=root_path,
            app_state=self.app_state,
        )
        if headers is None:
            headers = {}
        headers.setdefault("user-agent", "testclient")
        super().__init__(
            base_url=base_url,
            headers=headers,
            transport=transport,
            follow_redirects=follow_redirects,
            cookies=cookies,
        )

    @contextlib.contextmanager
    def _portal_factory(self) -> typing.Generator[anyio.abc.BlockingPortal, None, None]:
        """Yield the long-lived portal when inside a `with` block, else a temporary one."""
        if self.portal is not None:
            yield self.portal
        else:
            with anyio.from_thread.start_blocking_portal(**self.async_backend) as portal:
                yield portal

    def _choose_redirect_arg(
        self, follow_redirects: bool | None, allow_redirects: bool | None
    ) -> bool | httpx._client.UseClientDefault:
        """Reconcile `follow_redirects` with the deprecated `allow_redirects`.

        `follow_redirects` takes precedence; `allow_redirects` emits a
        DeprecationWarning.  When neither is given, the client default is used.
        """
        redirect: bool | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT
        if allow_redirects is not None:
            message = "The `allow_redirects` argument is deprecated. Use `follow_redirects` instead."
            warnings.warn(message, DeprecationWarning)
            redirect = allow_redirects
        if follow_redirects is not None:
            redirect = follow_redirects
        elif allow_redirects is not None and follow_redirects is not None:
            # NOTE(review): unreachable — the `elif` runs only when
            # follow_redirects is None, so this condition is always False.
            raise RuntimeError(  # pragma: no cover
                "Cannot use both `allow_redirects` and `follow_redirects`."
            )
        return redirect

    def request(  # type: ignore[override]
        self,
        method: str,
        url: httpx._types.URLTypes,
        *,
        content: httpx._types.RequestContent | None = None,
        data: _RequestData | None = None,
        files: httpx._types.RequestFiles | None = None,
        json: typing.Any = None,
        params: httpx._types.QueryParamTypes | None = None,
        headers: httpx._types.HeaderTypes | None = None,
        cookies: httpx._types.CookieTypes | None = None,
        auth: httpx._types.AuthTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
        follow_redirects: bool | None = None,
        allow_redirects: bool | None = None,
        timeout: httpx._types.TimeoutTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
        extensions: dict[str, typing.Any] | None = None,
    ) -> httpx.Response:
        """As httpx.Client.request, plus the deprecated `allow_redirects`."""
        url = self._merge_url(url)
        redirect = self._choose_redirect_arg(follow_redirects, allow_redirects)
        return super().request(
            method,
            url,
            content=content,
            data=data,
            files=files,
            json=json,
            params=params,
            headers=headers,
            cookies=cookies,
            auth=auth,
            follow_redirects=redirect,
            timeout=timeout,
            extensions=extensions,
        )

    def get(  # type: ignore[override]
        self,
        url: httpx._types.URLTypes,
        *,
        params: httpx._types.QueryParamTypes | None = None,
        headers: httpx._types.HeaderTypes | None = None,
        cookies: httpx._types.CookieTypes | None = None,
        auth: httpx._types.AuthTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
        follow_redirects: bool | None = None,
        allow_redirects: bool | None = None,
        timeout: httpx._types.TimeoutTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
        extensions: dict[str, typing.Any] | None = None,
    ) -> httpx.Response:
        """As httpx.Client.get, plus the deprecated `allow_redirects`."""
        redirect = self._choose_redirect_arg(follow_redirects, allow_redirects)
        return super().get(
            url,
            params=params,
            headers=headers,
            cookies=cookies,
            auth=auth,
            follow_redirects=redirect,
            timeout=timeout,
            extensions=extensions,
        )

    def options(  # type: ignore[override]
        self,
        url: httpx._types.URLTypes,
        *,
        params: httpx._types.QueryParamTypes | None = None,
        headers: httpx._types.HeaderTypes | None = None,
        cookies: httpx._types.CookieTypes | None = None,
        auth: httpx._types.AuthTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
        follow_redirects: bool | None = None,
        allow_redirects: bool | None = None,
        timeout: httpx._types.TimeoutTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
        extensions: dict[str, typing.Any] | None = None,
    ) -> httpx.Response:
        """As httpx.Client.options, plus the deprecated `allow_redirects`."""
        redirect = self._choose_redirect_arg(follow_redirects, allow_redirects)
        return super().options(
            url,
            params=params,
            headers=headers,
            cookies=cookies,
            auth=auth,
            follow_redirects=redirect,
            timeout=timeout,
            extensions=extensions,
        )

    def head(  # type: ignore[override]
        self,
        url: httpx._types.URLTypes,
        *,
        params: httpx._types.QueryParamTypes | None = None,
        headers: httpx._types.HeaderTypes | None = None,
        cookies: httpx._types.CookieTypes | None = None,
        auth: httpx._types.AuthTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
        follow_redirects: bool | None = None,
        allow_redirects: bool | None = None,
        timeout: httpx._types.TimeoutTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
        extensions: dict[str, typing.Any] | None = None,
    ) -> httpx.Response:
        """As httpx.Client.head, plus the deprecated `allow_redirects`."""
        redirect = self._choose_redirect_arg(follow_redirects, allow_redirects)
        return super().head(
            url,
            params=params,
            headers=headers,
            cookies=cookies,
            auth=auth,
            follow_redirects=redirect,
            timeout=timeout,
            extensions=extensions,
        )

    def post(  # type: ignore[override]
        self,
        url: httpx._types.URLTypes,
        *,
        content: httpx._types.RequestContent | None = None,
        data: _RequestData | None = None,
        files: httpx._types.RequestFiles | None = None,
        json: typing.Any = None,
        params: httpx._types.QueryParamTypes | None = None,
        headers: httpx._types.HeaderTypes | None = None,
        cookies: httpx._types.CookieTypes | None = None,
        auth: httpx._types.AuthTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
        follow_redirects: bool | None = None,
        allow_redirects: bool | None = None,
        timeout: httpx._types.TimeoutTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
        extensions: dict[str, typing.Any] | None = None,
    ) -> httpx.Response:
        """As httpx.Client.post, plus the deprecated `allow_redirects`."""
        redirect = self._choose_redirect_arg(follow_redirects, allow_redirects)
        return super().post(
            url,
            content=content,
            data=data,
            files=files,
            json=json,
            params=params,
            headers=headers,
            cookies=cookies,
            auth=auth,
            follow_redirects=redirect,
            timeout=timeout,
            extensions=extensions,
        )

    def put(  # type: ignore[override]
        self,
        url: httpx._types.URLTypes,
        *,
        content: httpx._types.RequestContent | None = None,
        data: _RequestData | None = None,
        files: httpx._types.RequestFiles | None = None,
        json: typing.Any = None,
        params: httpx._types.QueryParamTypes | None = None,
        headers: httpx._types.HeaderTypes | None = None,
        cookies: httpx._types.CookieTypes | None = None,
        auth: httpx._types.AuthTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
        follow_redirects: bool | None = None,
        allow_redirects: bool | None = None,
        timeout: httpx._types.TimeoutTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
        extensions: dict[str, typing.Any] | None = None,
    ) -> httpx.Response:
        """As httpx.Client.put, plus the deprecated `allow_redirects`."""
        redirect = self._choose_redirect_arg(follow_redirects, allow_redirects)
        return super().put(
            url,
            content=content,
            data=data,
            files=files,
            json=json,
            params=params,
            headers=headers,
            cookies=cookies,
            auth=auth,
            follow_redirects=redirect,
            timeout=timeout,
            extensions=extensions,
        )

    def patch(  # type: ignore[override]
        self,
        url: httpx._types.URLTypes,
        *,
        content: httpx._types.RequestContent | None = None,
        data: _RequestData | None = None,
        files: httpx._types.RequestFiles | None = None,
        json: typing.Any = None,
        params: httpx._types.QueryParamTypes | None = None,
        headers: httpx._types.HeaderTypes | None = None,
        cookies: httpx._types.CookieTypes | None = None,
        auth: httpx._types.AuthTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
        follow_redirects: bool | None = None,
        allow_redirects: bool | None = None,
        timeout: httpx._types.TimeoutTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
        extensions: dict[str, typing.Any] | None = None,
    ) -> httpx.Response:
        """As httpx.Client.patch, plus the deprecated `allow_redirects`."""
        redirect = self._choose_redirect_arg(follow_redirects, allow_redirects)
        return super().patch(
            url,
            content=content,
            data=data,
            files=files,
            json=json,
            params=params,
            headers=headers,
            cookies=cookies,
            auth=auth,
            follow_redirects=redirect,
            timeout=timeout,
            extensions=extensions,
        )

    def delete(  # type: ignore[override]
        self,
        url: httpx._types.URLTypes,
        *,
        params: httpx._types.QueryParamTypes | None = None,
        headers: httpx._types.HeaderTypes | None = None,
        cookies: httpx._types.CookieTypes | None = None,
        auth: httpx._types.AuthTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
        follow_redirects: bool | None = None,
        allow_redirects: bool | None = None,
        timeout: httpx._types.TimeoutTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
        extensions: dict[str, typing.Any] | None = None,
    ) -> httpx.Response:
        """As httpx.Client.delete, plus the deprecated `allow_redirects`."""
        redirect = self._choose_redirect_arg(follow_redirects, allow_redirects)
        return super().delete(
            url,
            params=params,
            headers=headers,
            cookies=cookies,
            auth=auth,
            follow_redirects=redirect,
            timeout=timeout,
            extensions=extensions,
        )

    def websocket_connect(
        self,
        url: str,
        subprotocols: typing.Sequence[str] | None = None,
        **kwargs: typing.Any,
    ) -> WebSocketTestSession:
        """Open a WebSocket test session against the app.

        Issues a GET with handshake headers; the transport raises ``_Upgrade``
        carrying the session, which is caught and returned here.
        """
        url = urljoin("ws://testserver", url)
        headers = kwargs.get("headers", {})
        headers.setdefault("connection", "upgrade")
        headers.setdefault("sec-websocket-key", "testserver==")
        headers.setdefault("sec-websocket-version", "13")
        if subprotocols is not None:
            headers.setdefault("sec-websocket-protocol", ", ".join(subprotocols))
        kwargs["headers"] = headers
        try:
            super().request("GET", url, **kwargs)
        except _Upgrade as exc:
            session = exc.session
        else:
            raise RuntimeError("Expected WebSocket upgrade")  # pragma: no cover

        return session

    def __enter__(self) -> TestClient:
        """Start a blocking portal, run the app's lifespan startup, and keep both alive."""
        with contextlib.ExitStack() as stack:
            self.portal = portal = stack.enter_context(anyio.from_thread.start_blocking_portal(**self.async_backend))

            @stack.callback
            def reset_portal() -> None:
                self.portal = None

            # Two memory streams staple together the lifespan protocol:
            # stream_send carries app -> client messages, stream_receive the reverse.
            send1: ObjectSendStream[typing.MutableMapping[str, typing.Any] | None]
            receive1: ObjectReceiveStream[typing.MutableMapping[str, typing.Any] | None]
            send2: ObjectSendStream[typing.MutableMapping[str, typing.Any]]
            receive2: ObjectReceiveStream[typing.MutableMapping[str, typing.Any]]
            send1, receive1 = anyio.create_memory_object_stream(math.inf)
            send2, receive2 = anyio.create_memory_object_stream(math.inf)
            self.stream_send = StapledObjectStream(send1, receive1)
            self.stream_receive = StapledObjectStream(send2, receive2)
            self.task = portal.start_task_soon(self.lifespan)
            portal.call(self.wait_startup)

            @stack.callback
            def wait_shutdown() -> None:
                portal.call(self.wait_shutdown)

            # Transfer ownership of the callbacks so __exit__ runs them.
            self.exit_stack = stack.pop_all()

        return self

    def __exit__(self, *args: typing.Any) -> None:
        """Run lifespan shutdown and tear down the portal (via the saved exit stack)."""
        self.exit_stack.close()

    async def lifespan(self) -> None:
        """Run the app's lifespan; a trailing None on stream_send signals app exit."""
        scope = {"type": "lifespan", "state": self.app_state}
        try:
            await self.app(scope, self.stream_receive.receive, self.stream_send.send)
        finally:
            await self.stream_send.send(None)

    async def wait_startup(self) -> None:
        """Send lifespan.startup and wait for the app's complete/failed reply."""
        await self.stream_receive.send({"type": "lifespan.startup"})

        async def receive() -> typing.Any:
            message = await self.stream_send.receive()
            if message is None:
                # App exited without replying: surface its exception.
                self.task.result()
            return message

        message = await receive()
        assert message["type"] in (
            "lifespan.startup.complete",
            "lifespan.startup.failed",
        )
        if message["type"] == "lifespan.startup.failed":
            # Consume the final None so the failure propagates via task.result().
            await receive()

    async def wait_shutdown(self) -> None:
        """Send lifespan.shutdown, await the reply, and close both streams."""
        async def receive() -> typing.Any:
            message = await self.stream_send.receive()
            if message is None:
                # App exited without replying: surface its exception.
                self.task.result()
            return message

        async with self.stream_send, self.stream_receive:
            await self.stream_receive.send({"type": "lifespan.shutdown"})
            message = await receive()
            assert message["type"] in (
                "lifespan.shutdown.complete",
                "lifespan.shutdown.failed",
            )
            if message["type"] == "lifespan.shutdown.failed":
                # Consume the final None so the failure propagates via task.result().
                await receive()
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/types.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import typing

if typing.TYPE_CHECKING:
    # Imported only for annotations to avoid import cycles at runtime.
    from starlette.requests import Request
    from starlette.responses import Response
    from starlette.websockets import WebSocket

# Type variable for the application instance a lifespan function receives.
AppType = typing.TypeVar("AppType")

# Core ASGI types: the connection scope and the messages exchanged over it.
Scope = typing.MutableMapping[str, typing.Any]
Message = typing.MutableMapping[str, typing.Any]

# The two callables an ASGI server passes to the application.
Receive = typing.Callable[[], typing.Awaitable[Message]]
Send = typing.Callable[[Message], typing.Awaitable[None]]

# An ASGI 3 application: async callable taking (scope, receive, send).
ASGIApp = typing.Callable[[Scope, Receive, Send], typing.Awaitable[None]]

# Lifespan context managers: stateless yields nothing; stateful yields a
# mapping merged into each request's scope "state".
StatelessLifespan = typing.Callable[[AppType], typing.AsyncContextManager[None]]
StatefulLifespan = typing.Callable[[AppType], typing.AsyncContextManager[typing.Mapping[str, typing.Any]]]
Lifespan = typing.Union[StatelessLifespan[AppType], StatefulLifespan[AppType]]

# Exception handler signatures for HTTP and WebSocket endpoints.
HTTPExceptionHandler = typing.Callable[["Request", Exception], "Response | typing.Awaitable[Response]"]
WebSocketExceptionHandler = typing.Callable[["WebSocket", Exception], typing.Awaitable[None]]
ExceptionHandler = typing.Union[HTTPExceptionHandler, WebSocketExceptionHandler]
|
evalkit_cambrian/lib/python3.10/site-packages/starlette/websockets.py
ADDED
|
@@ -0,0 +1,195 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import enum
|
| 4 |
+
import json
|
| 5 |
+
import typing
|
| 6 |
+
|
| 7 |
+
from starlette.requests import HTTPConnection
|
| 8 |
+
from starlette.responses import Response
|
| 9 |
+
from starlette.types import Message, Receive, Scope, Send
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class WebSocketState(enum.Enum):
    """Lifecycle states tracked separately for the client and application sides.

    RESPONSE is entered by the application side when it rejects the handshake
    with an HTTP response ("websocket.http.response.start") instead of accepting.
    """

    CONNECTING = 0
    CONNECTED = 1
    DISCONNECTED = 2
    RESPONSE = 3
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class WebSocketDisconnect(Exception):
    """Raised when the WebSocket connection is closed or lost.

    Carries the close ``code`` and a ``reason`` string (never None; an absent
    reason is normalized to the empty string).
    """

    def __init__(self, code: int = 1000, reason: str | None = None) -> None:
        self.code = code
        self.reason = "" if reason is None else reason
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class WebSocket(HTTPConnection):
|
| 26 |
+
def __init__(self, scope: Scope, receive: Receive, send: Send) -> None:
|
| 27 |
+
super().__init__(scope)
|
| 28 |
+
assert scope["type"] == "websocket"
|
| 29 |
+
self._receive = receive
|
| 30 |
+
self._send = send
|
| 31 |
+
self.client_state = WebSocketState.CONNECTING
|
| 32 |
+
self.application_state = WebSocketState.CONNECTING
|
| 33 |
+
|
| 34 |
+
async def receive(self) -> Message:
|
| 35 |
+
"""
|
| 36 |
+
Receive ASGI websocket messages, ensuring valid state transitions.
|
| 37 |
+
"""
|
| 38 |
+
if self.client_state == WebSocketState.CONNECTING:
|
| 39 |
+
message = await self._receive()
|
| 40 |
+
message_type = message["type"]
|
| 41 |
+
if message_type != "websocket.connect":
|
| 42 |
+
raise RuntimeError(f'Expected ASGI message "websocket.connect", but got {message_type!r}')
|
| 43 |
+
self.client_state = WebSocketState.CONNECTED
|
| 44 |
+
return message
|
| 45 |
+
elif self.client_state == WebSocketState.CONNECTED:
|
| 46 |
+
message = await self._receive()
|
| 47 |
+
message_type = message["type"]
|
| 48 |
+
if message_type not in {"websocket.receive", "websocket.disconnect"}:
|
| 49 |
+
raise RuntimeError(
|
| 50 |
+
f'Expected ASGI message "websocket.receive" or "websocket.disconnect", but got {message_type!r}'
|
| 51 |
+
)
|
| 52 |
+
if message_type == "websocket.disconnect":
|
| 53 |
+
self.client_state = WebSocketState.DISCONNECTED
|
| 54 |
+
return message
|
| 55 |
+
else:
|
| 56 |
+
raise RuntimeError('Cannot call "receive" once a disconnect message has been received.')
|
| 57 |
+
|
| 58 |
+
async def send(self, message: Message) -> None:
|
| 59 |
+
"""
|
| 60 |
+
Send ASGI websocket messages, ensuring valid state transitions.
|
| 61 |
+
"""
|
| 62 |
+
if self.application_state == WebSocketState.CONNECTING:
|
| 63 |
+
message_type = message["type"]
|
| 64 |
+
if message_type not in {"websocket.accept", "websocket.close", "websocket.http.response.start"}:
|
| 65 |
+
raise RuntimeError(
|
| 66 |
+
'Expected ASGI message "websocket.accept", "websocket.close" or "websocket.http.response.start", '
|
| 67 |
+
f"but got {message_type!r}"
|
| 68 |
+
)
|
| 69 |
+
if message_type == "websocket.close":
|
| 70 |
+
self.application_state = WebSocketState.DISCONNECTED
|
| 71 |
+
elif message_type == "websocket.http.response.start":
|
| 72 |
+
self.application_state = WebSocketState.RESPONSE
|
| 73 |
+
else:
|
| 74 |
+
self.application_state = WebSocketState.CONNECTED
|
| 75 |
+
await self._send(message)
|
| 76 |
+
elif self.application_state == WebSocketState.CONNECTED:
|
| 77 |
+
message_type = message["type"]
|
| 78 |
+
if message_type not in {"websocket.send", "websocket.close"}:
|
| 79 |
+
raise RuntimeError(
|
| 80 |
+
f'Expected ASGI message "websocket.send" or "websocket.close", but got {message_type!r}'
|
| 81 |
+
)
|
| 82 |
+
if message_type == "websocket.close":
|
| 83 |
+
self.application_state = WebSocketState.DISCONNECTED
|
| 84 |
+
try:
|
| 85 |
+
await self._send(message)
|
| 86 |
+
except OSError:
|
| 87 |
+
self.application_state = WebSocketState.DISCONNECTED
|
| 88 |
+
raise WebSocketDisconnect(code=1006)
|
| 89 |
+
elif self.application_state == WebSocketState.RESPONSE:
|
| 90 |
+
message_type = message["type"]
|
| 91 |
+
if message_type != "websocket.http.response.body":
|
| 92 |
+
raise RuntimeError(f'Expected ASGI message "websocket.http.response.body", but got {message_type!r}')
|
| 93 |
+
if not message.get("more_body", False):
|
| 94 |
+
self.application_state = WebSocketState.DISCONNECTED
|
| 95 |
+
await self._send(message)
|
| 96 |
+
else:
|
| 97 |
+
raise RuntimeError('Cannot call "send" once a close message has been sent.')
|
| 98 |
+
|
| 99 |
+
async def accept(
|
| 100 |
+
self,
|
| 101 |
+
subprotocol: str | None = None,
|
| 102 |
+
headers: typing.Iterable[tuple[bytes, bytes]] | None = None,
|
| 103 |
+
) -> None:
|
| 104 |
+
headers = headers or []
|
| 105 |
+
|
| 106 |
+
if self.client_state == WebSocketState.CONNECTING:
|
| 107 |
+
# If we haven't yet seen the 'connect' message, then wait for it first.
|
| 108 |
+
await self.receive()
|
| 109 |
+
await self.send({"type": "websocket.accept", "subprotocol": subprotocol, "headers": headers})
|
| 110 |
+
|
| 111 |
+
def _raise_on_disconnect(self, message: Message) -> None:
|
| 112 |
+
if message["type"] == "websocket.disconnect":
|
| 113 |
+
raise WebSocketDisconnect(message["code"], message.get("reason"))
|
| 114 |
+
|
| 115 |
+
async def receive_text(self) -> str:
|
| 116 |
+
if self.application_state != WebSocketState.CONNECTED:
|
| 117 |
+
raise RuntimeError('WebSocket is not connected. Need to call "accept" first.')
|
| 118 |
+
message = await self.receive()
|
| 119 |
+
self._raise_on_disconnect(message)
|
| 120 |
+
return typing.cast(str, message["text"])
|
| 121 |
+
|
| 122 |
+
async def receive_bytes(self) -> bytes:
|
| 123 |
+
if self.application_state != WebSocketState.CONNECTED:
|
| 124 |
+
raise RuntimeError('WebSocket is not connected. Need to call "accept" first.')
|
| 125 |
+
message = await self.receive()
|
| 126 |
+
self._raise_on_disconnect(message)
|
| 127 |
+
return typing.cast(bytes, message["bytes"])
|
| 128 |
+
|
| 129 |
+
async def receive_json(self, mode: str = "text") -> typing.Any:
|
| 130 |
+
if mode not in {"text", "binary"}:
|
| 131 |
+
raise RuntimeError('The "mode" argument should be "text" or "binary".')
|
| 132 |
+
if self.application_state != WebSocketState.CONNECTED:
|
| 133 |
+
raise RuntimeError('WebSocket is not connected. Need to call "accept" first.')
|
| 134 |
+
message = await self.receive()
|
| 135 |
+
self._raise_on_disconnect(message)
|
| 136 |
+
|
| 137 |
+
if mode == "text":
|
| 138 |
+
text = message["text"]
|
| 139 |
+
else:
|
| 140 |
+
text = message["bytes"].decode("utf-8")
|
| 141 |
+
return json.loads(text)
|
| 142 |
+
|
| 143 |
+
async def iter_text(self) -> typing.AsyncIterator[str]:
|
| 144 |
+
try:
|
| 145 |
+
while True:
|
| 146 |
+
yield await self.receive_text()
|
| 147 |
+
except WebSocketDisconnect:
|
| 148 |
+
pass
|
| 149 |
+
|
| 150 |
+
async def iter_bytes(self) -> typing.AsyncIterator[bytes]:
|
| 151 |
+
try:
|
| 152 |
+
while True:
|
| 153 |
+
yield await self.receive_bytes()
|
| 154 |
+
except WebSocketDisconnect:
|
| 155 |
+
pass
|
| 156 |
+
|
| 157 |
+
async def iter_json(self) -> typing.AsyncIterator[typing.Any]:
|
| 158 |
+
try:
|
| 159 |
+
while True:
|
| 160 |
+
yield await self.receive_json()
|
| 161 |
+
except WebSocketDisconnect:
|
| 162 |
+
pass
|
| 163 |
+
|
| 164 |
+
async def send_text(self, data: str) -> None:
|
| 165 |
+
await self.send({"type": "websocket.send", "text": data})
|
| 166 |
+
|
| 167 |
+
async def send_bytes(self, data: bytes) -> None:
|
| 168 |
+
await self.send({"type": "websocket.send", "bytes": data})
|
| 169 |
+
|
| 170 |
+
async def send_json(self, data: typing.Any, mode: str = "text") -> None:
|
| 171 |
+
if mode not in {"text", "binary"}:
|
| 172 |
+
raise RuntimeError('The "mode" argument should be "text" or "binary".')
|
| 173 |
+
text = json.dumps(data, separators=(",", ":"), ensure_ascii=False)
|
| 174 |
+
if mode == "text":
|
| 175 |
+
await self.send({"type": "websocket.send", "text": text})
|
| 176 |
+
else:
|
| 177 |
+
await self.send({"type": "websocket.send", "bytes": text.encode("utf-8")})
|
| 178 |
+
|
| 179 |
+
async def close(self, code: int = 1000, reason: str | None = None) -> None:
|
| 180 |
+
await self.send({"type": "websocket.close", "code": code, "reason": reason or ""})
|
| 181 |
+
|
| 182 |
+
async def send_denial_response(self, response: Response) -> None:
|
| 183 |
+
if "websocket.http.response" in self.scope.get("extensions", {}):
|
| 184 |
+
await response(self.scope, self.receive, self.send)
|
| 185 |
+
else:
|
| 186 |
+
raise RuntimeError("The server doesn't support the Websocket Denial Response extension.")
|
| 187 |
+
|
| 188 |
+
|
| 189 |
+
class WebSocketClose:
|
| 190 |
+
def __init__(self, code: int = 1000, reason: str | None = None) -> None:
|
| 191 |
+
self.code = code
|
| 192 |
+
self.reason = reason or ""
|
| 193 |
+
|
| 194 |
+
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
|
| 195 |
+
await send({"type": "websocket.close", "code": self.code, "reason": self.reason})
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_cdist_forward_ops.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API _cdist_forward {
|
| 18 |
+
using schema = at::Tensor (const at::Tensor &, const at::Tensor &, double, ::std::optional<int64_t>);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_cdist_forward")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_cdist_forward(Tensor x1, Tensor x2, float p, int? compute_mode) -> Tensor")
|
| 24 |
+
static at::Tensor call(const at::Tensor & x1, const at::Tensor & x2, double p, ::std::optional<int64_t> compute_mode);
|
| 25 |
+
static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & x1, const at::Tensor & x2, double p, ::std::optional<int64_t> compute_mode);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
struct TORCH_API _cdist_forward_out {
|
| 29 |
+
using schema = at::Tensor & (const at::Tensor &, const at::Tensor &, double, ::std::optional<int64_t>, at::Tensor &);
|
| 30 |
+
using ptr_schema = schema*;
|
| 31 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 32 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_cdist_forward")
|
| 33 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "out")
|
| 34 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_cdist_forward.out(Tensor x1, Tensor x2, float p, int? compute_mode, *, Tensor(a!) out) -> Tensor(a!)")
|
| 35 |
+
static at::Tensor & call(const at::Tensor & x1, const at::Tensor & x2, double p, ::std::optional<int64_t> compute_mode, at::Tensor & out);
|
| 36 |
+
static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & x1, const at::Tensor & x2, double p, ::std::optional<int64_t> compute_mode, at::Tensor & out);
|
| 37 |
+
};
|
| 38 |
+
|
| 39 |
+
}} // namespace at::_ops
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_cholesky_solve_helper.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <optional>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_cholesky_solve_helper_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_cholesky_solve_helper(Tensor self, Tensor A, bool upper) -> Tensor
|
| 26 |
+
inline at::Tensor _cholesky_solve_helper(const at::Tensor & self, const at::Tensor & A, bool upper) {
|
| 27 |
+
return at::_ops::_cholesky_solve_helper::call(self, A, upper);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
// aten::_cholesky_solve_helper.out(Tensor self, Tensor A, bool upper, *, Tensor(a!) out) -> Tensor(a!)
|
| 31 |
+
inline at::Tensor & _cholesky_solve_helper_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & A, bool upper) {
|
| 32 |
+
return at::_ops::_cholesky_solve_helper_out::call(self, A, upper, out);
|
| 33 |
+
}
|
| 34 |
+
// aten::_cholesky_solve_helper.out(Tensor self, Tensor A, bool upper, *, Tensor(a!) out) -> Tensor(a!)
|
| 35 |
+
inline at::Tensor & _cholesky_solve_helper_outf(const at::Tensor & self, const at::Tensor & A, bool upper, at::Tensor & out) {
|
| 36 |
+
return at::_ops::_cholesky_solve_helper_out::call(self, A, upper, out);
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
}
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_coalesce_native.h
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <optional>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor & _coalesce_out(const at::Tensor & self, at::Tensor & out);
|
| 20 |
+
TORCH_API at::Tensor _coalesce_sparse_cpu(const at::Tensor & self);
|
| 21 |
+
TORCH_API at::Tensor _coalesce_sparse_cuda(const at::Tensor & self);
|
| 22 |
+
} // namespace native
|
| 23 |
+
} // namespace at
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_cudnn_ctc_loss_cuda_dispatch.h
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace cuda {
|
| 19 |
+
|
| 20 |
+
TORCH_API ::std::tuple<at::Tensor,at::Tensor> _cudnn_ctc_loss(const at::Tensor & log_probs, const at::Tensor & targets, at::IntArrayRef input_lengths, at::IntArrayRef target_lengths, int64_t blank, bool deterministic, bool zero_infinity);
|
| 21 |
+
TORCH_API ::std::tuple<at::Tensor,at::Tensor> _cudnn_ctc_loss(const at::Tensor & log_probs, const at::Tensor & targets, const at::Tensor & input_lengths, const at::Tensor & target_lengths, int64_t blank, bool deterministic, bool zero_infinity);
|
| 22 |
+
|
| 23 |
+
} // namespace cuda
|
| 24 |
+
} // namespace at
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_cudnn_init_dropout_state_native.h
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <optional>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor & _cudnn_init_dropout_state_out(double dropout, bool train, int64_t dropout_seed, at::Tensor & out);
|
| 20 |
+
TORCH_API at::Tensor _cudnn_init_dropout_state(double dropout, bool train, int64_t dropout_seed, ::std::optional<at::ScalarType> dtype={}, ::std::optional<at::Layout> layout={}, ::std::optional<at::Device> device={}, ::std::optional<bool> pin_memory={});
|
| 21 |
+
} // namespace native
|
| 22 |
+
} // namespace at
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_abs.h
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <optional>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_foreach_abs_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_foreach_abs(Tensor[] self) -> Tensor[]
|
| 26 |
+
inline ::std::vector<at::Tensor> _foreach_abs(at::TensorList self) {
|
| 27 |
+
return at::_ops::_foreach_abs::call(self);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
// aten::_foreach_abs_(Tensor(a!)[] self) -> ()
|
| 31 |
+
inline void _foreach_abs_(at::TensorList self) {
|
| 32 |
+
return at::_ops::_foreach_abs_::call(self);
|
| 33 |
+
}
|
| 34 |
+
|
| 35 |
+
// aten::_foreach_abs.out(Tensor[] self, *, Tensor(a!)[] out) -> ()
|
| 36 |
+
inline void _foreach_abs_out(at::TensorList out, at::TensorList self) {
|
| 37 |
+
return at::_ops::_foreach_abs_out::call(self, out);
|
| 38 |
+
}
|
| 39 |
+
// aten::_foreach_abs.out(Tensor[] self, *, Tensor(a!)[] out) -> ()
|
| 40 |
+
inline void _foreach_abs_outf(at::TensorList self, at::TensorList out) {
|
| 41 |
+
return at::_ops::_foreach_abs_out::call(self, out);
|
| 42 |
+
}
|
| 43 |
+
|
| 44 |
+
}
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_log1p.h
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <optional>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_foreach_log1p_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_foreach_log1p(Tensor[] self) -> Tensor[]
|
| 26 |
+
inline ::std::vector<at::Tensor> _foreach_log1p(at::TensorList self) {
|
| 27 |
+
return at::_ops::_foreach_log1p::call(self);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
// aten::_foreach_log1p_(Tensor(a!)[] self) -> ()
|
| 31 |
+
inline void _foreach_log1p_(at::TensorList self) {
|
| 32 |
+
return at::_ops::_foreach_log1p_::call(self);
|
| 33 |
+
}
|
| 34 |
+
|
| 35 |
+
// aten::_foreach_log1p.out(Tensor[] self, *, Tensor(a!)[] out) -> ()
|
| 36 |
+
inline void _foreach_log1p_out(at::TensorList out, at::TensorList self) {
|
| 37 |
+
return at::_ops::_foreach_log1p_out::call(self, out);
|
| 38 |
+
}
|
| 39 |
+
// aten::_foreach_log1p.out(Tensor[] self, *, Tensor(a!)[] out) -> ()
|
| 40 |
+
inline void _foreach_log1p_outf(at::TensorList self, at::TensorList out) {
|
| 41 |
+
return at::_ops::_foreach_log1p_out::call(self, out);
|
| 42 |
+
}
|
| 43 |
+
|
| 44 |
+
}
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_indices_native.h
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <optional>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor _indices_sparse(const at::Tensor & self);
|
| 20 |
+
} // namespace native
|
| 21 |
+
} // namespace at
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_log_softmax_backward_data_meta.h
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeMetaFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <optional>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/TensorIterator.h>
|
| 13 |
+
#include <ATen/TensorMeta.h>
|
| 14 |
+
#include <tuple>
|
| 15 |
+
#include <vector>
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace meta {
|
| 19 |
+
|
| 20 |
+
struct TORCH_API structured__log_softmax_backward_data : public at::impl::MetaBase {
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
void meta(const at::Tensor & grad_output, const at::Tensor & output, int64_t dim, at::ScalarType input_dtype);
|
| 24 |
+
};
|
| 25 |
+
|
| 26 |
+
} // namespace native
|
| 27 |
+
} // namespace at
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_log_softmax_meta_dispatch.h
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace meta {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor _log_softmax(const at::Tensor & self, int64_t dim, bool half_to_float);
|
| 21 |
+
TORCH_API at::Tensor & _log_softmax_out(at::Tensor & out, const at::Tensor & self, int64_t dim, bool half_to_float);
|
| 22 |
+
TORCH_API at::Tensor & _log_softmax_outf(const at::Tensor & self, int64_t dim, bool half_to_float, at::Tensor & out);
|
| 23 |
+
|
| 24 |
+
} // namespace meta
|
| 25 |
+
} // namespace at
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_make_dual_copy.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <optional>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_make_dual_copy_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_make_dual_copy(Tensor primal, Tensor tangent, int level) -> Tensor
|
| 26 |
+
inline at::Tensor _make_dual_copy(const at::Tensor & primal, const at::Tensor & tangent, int64_t level) {
|
| 27 |
+
return at::_ops::_make_dual_copy::call(primal, tangent, level);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
// aten::_make_dual_copy.out(Tensor primal, Tensor tangent, int level, *, Tensor(a!) out) -> Tensor(a!)
|
| 31 |
+
inline at::Tensor & _make_dual_copy_out(at::Tensor & out, const at::Tensor & primal, const at::Tensor & tangent, int64_t level) {
|
| 32 |
+
return at::_ops::_make_dual_copy_out::call(primal, tangent, level, out);
|
| 33 |
+
}
|
| 34 |
+
// aten::_make_dual_copy.out(Tensor primal, Tensor tangent, int level, *, Tensor(a!) out) -> Tensor(a!)
|
| 35 |
+
inline at::Tensor & _make_dual_copy_outf(const at::Tensor & primal, const at::Tensor & tangent, int64_t level, at::Tensor & out) {
|
| 36 |
+
return at::_ops::_make_dual_copy_out::call(primal, tangent, level, out);
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
}
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_make_per_tensor_quantized_tensor_ops.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API _make_per_tensor_quantized_tensor {
|
| 18 |
+
using schema = at::Tensor (const at::Tensor &, double, int64_t);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_make_per_tensor_quantized_tensor")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_make_per_tensor_quantized_tensor(Tensor self, float scale, int zero_point) -> Tensor")
|
| 24 |
+
static at::Tensor call(const at::Tensor & self, double scale, int64_t zero_point);
|
| 25 |
+
static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, double scale, int64_t zero_point);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
struct TORCH_API _make_per_tensor_quantized_tensor_out {
|
| 29 |
+
using schema = at::Tensor & (const at::Tensor &, double, int64_t, at::Tensor &);
|
| 30 |
+
using ptr_schema = schema*;
|
| 31 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 32 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_make_per_tensor_quantized_tensor")
|
| 33 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "out")
|
| 34 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_make_per_tensor_quantized_tensor.out(Tensor self, float scale, int zero_point, *, Tensor(a!) out) -> Tensor(a!)")
|
| 35 |
+
static at::Tensor & call(const at::Tensor & self, double scale, int64_t zero_point, at::Tensor & out);
|
| 36 |
+
static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, double scale, int64_t zero_point, at::Tensor & out);
|
| 37 |
+
};
|
| 38 |
+
|
| 39 |
+
}} // namespace at::_ops
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_nested_tensor_softmax_with_shape.h
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <optional>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_nested_tensor_softmax_with_shape_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_nested_tensor_softmax_with_shape(Tensor self, Tensor query) -> Tensor
|
| 26 |
+
inline at::Tensor _nested_tensor_softmax_with_shape(const at::Tensor & self, const at::Tensor & query) {
|
| 27 |
+
return at::_ops::_nested_tensor_softmax_with_shape::call(self, query);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
}
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_reshape_from_tensor_native.h
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <optional>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor _reshape_from_tensor(const at::Tensor & self, const at::Tensor & shape);
|
| 20 |
+
} // namespace native
|
| 21 |
+
} // namespace at
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_scaled_dot_product_efficient_attention_ops.h
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API _scaled_dot_product_efficient_attention {
|
| 18 |
+
using schema = ::std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor> (const at::Tensor &, const at::Tensor &, const at::Tensor &, const ::std::optional<at::Tensor> &, bool, double, bool, ::std::optional<double>);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_scaled_dot_product_efficient_attention")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_scaled_dot_product_efficient_attention(Tensor query, Tensor key, Tensor value, Tensor? attn_bias, bool compute_log_sumexp, float dropout_p=0.0, bool is_causal=False, *, float? scale=None) -> (Tensor output, Tensor log_sumexp, Tensor philox_seed, Tensor philox_offset)")
|
| 24 |
+
static ::std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor> call(const at::Tensor & query, const at::Tensor & key, const at::Tensor & value, const ::std::optional<at::Tensor> & attn_bias, bool compute_log_sumexp, double dropout_p, bool is_causal, ::std::optional<double> scale);
|
| 25 |
+
static ::std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor> redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & query, const at::Tensor & key, const at::Tensor & value, const ::std::optional<at::Tensor> & attn_bias, bool compute_log_sumexp, double dropout_p, bool is_causal, ::std::optional<double> scale);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
}} // namespace at::_ops
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_to_cpu.h
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <optional>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_to_cpu_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_to_cpu(Tensor[] tensors) -> Tensor[]
|
| 26 |
+
inline ::std::vector<at::Tensor> _to_cpu(at::TensorList tensors) {
|
| 27 |
+
return at::_ops::_to_cpu::call(tensors);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
}
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/_version_ops.h
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API _version {
|
| 18 |
+
using schema = int64_t (const at::Tensor &);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_version")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_version(Tensor self) -> int")
|
| 24 |
+
static int64_t call(const at::Tensor & self);
|
| 25 |
+
static int64_t redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
}} // namespace at::_ops
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/adaptive_avg_pool3d_backward_native.h
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <optional>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor & adaptive_avg_pool3d_backward_out_cpu(const at::Tensor & grad_output, const at::Tensor & self, at::Tensor & grad_input);
|
| 20 |
+
TORCH_API at::Tensor & adaptive_avg_pool3d_backward_out_cuda(const at::Tensor & grad_output, const at::Tensor & self, at::Tensor & grad_input);
|
| 21 |
+
} // namespace native
|
| 22 |
+
} // namespace at
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/adaptive_max_pool3d_ops.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API adaptive_max_pool3d_out {
|
| 18 |
+
using schema = ::std::tuple<at::Tensor &,at::Tensor &> (const at::Tensor &, at::IntArrayRef, at::Tensor &, at::Tensor &);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::adaptive_max_pool3d")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "out")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "adaptive_max_pool3d.out(Tensor self, int[3] output_size, *, Tensor(a!) out, Tensor(b!) indices) -> (Tensor(a!), Tensor(b!))")
|
| 24 |
+
static ::std::tuple<at::Tensor &,at::Tensor &> call(const at::Tensor & self, at::IntArrayRef output_size, at::Tensor & out, at::Tensor & indices);
|
| 25 |
+
static ::std::tuple<at::Tensor &,at::Tensor &> redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, at::IntArrayRef output_size, at::Tensor & out, at::Tensor & indices);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
struct TORCH_API adaptive_max_pool3d {
|
| 29 |
+
using schema = ::std::tuple<at::Tensor,at::Tensor> (const at::Tensor &, at::IntArrayRef);
|
| 30 |
+
using ptr_schema = schema*;
|
| 31 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 32 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::adaptive_max_pool3d")
|
| 33 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 34 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "adaptive_max_pool3d(Tensor self, int[3] output_size) -> (Tensor, Tensor)")
|
| 35 |
+
static ::std::tuple<at::Tensor,at::Tensor> call(const at::Tensor & self, at::IntArrayRef output_size);
|
| 36 |
+
static ::std::tuple<at::Tensor,at::Tensor> redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, at::IntArrayRef output_size);
|
| 37 |
+
};
|
| 38 |
+
|
| 39 |
+
}} // namespace at::_ops
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/affine_grid_generator_native.h
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <optional>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor affine_grid_generator(const at::Tensor & theta, at::IntArrayRef size, bool align_corners);
|
| 20 |
+
TORCH_API at::Tensor & affine_grid_generator_out_symint(const at::Tensor & theta, c10::SymIntArrayRef size, bool align_corners, at::Tensor & out);
|
| 21 |
+
} // namespace native
|
| 22 |
+
} // namespace at
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/align_tensors_ops.h
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API align_tensors {
|
| 18 |
+
using schema = ::std::vector<at::Tensor> (at::TensorList);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::align_tensors")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "align_tensors(Tensor[] tensors) -> Tensor[]")
|
| 24 |
+
static ::std::vector<at::Tensor> call(at::TensorList tensors);
|
| 25 |
+
static ::std::vector<at::Tensor> redispatch(c10::DispatchKeySet dispatchKeySet, at::TensorList tensors);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
}} // namespace at::_ops
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/avg_pool3d_native.h
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <optional>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
#include <ATen/ops/avg_pool3d_meta.h>
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
struct TORCH_API structured_avg_pool3d_out_cpu : public at::meta::structured_avg_pool3d {
|
| 20 |
+
void impl(const at::Tensor & self, at::IntArrayRef kernel_size, at::IntArrayRef stride, at::IntArrayRef padding, bool ceil_mode, bool count_include_pad, ::std::optional<int64_t> divisor_override, const at::Tensor & out);
|
| 21 |
+
};
|
| 22 |
+
struct TORCH_API structured_avg_pool3d_out_cuda : public at::meta::structured_avg_pool3d {
|
| 23 |
+
void impl(const at::Tensor & self, at::IntArrayRef kernel_size, at::IntArrayRef stride, at::IntArrayRef padding, bool ceil_mode, bool count_include_pad, ::std::optional<int64_t> divisor_override, const at::Tensor & out);
|
| 24 |
+
};
|
| 25 |
+
TORCH_API at::Tensor mkldnn_avg_pool3d(const at::Tensor & self, at::IntArrayRef kernel_size, at::IntArrayRef stride={}, at::IntArrayRef padding=0, bool ceil_mode=false, bool count_include_pad=true, ::std::optional<int64_t> divisor_override=::std::nullopt);
|
| 26 |
+
TORCH_API at::Tensor & mkldnn_avg_pool3d_out(const at::Tensor & self, at::IntArrayRef kernel_size, at::IntArrayRef stride, at::IntArrayRef padding, bool ceil_mode, bool count_include_pad, ::std::optional<int64_t> divisor_override, at::Tensor & out);
|
| 27 |
+
TORCH_API at::Tensor avg_pool3d_quantized_cpu(const at::Tensor & self, at::IntArrayRef kernel_size, at::IntArrayRef stride={}, at::IntArrayRef padding=0, bool ceil_mode=false, bool count_include_pad=true, ::std::optional<int64_t> divisor_override=::std::nullopt);
|
| 28 |
+
} // namespace native
|
| 29 |
+
} // namespace at
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/bucketize_native.h
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <optional>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor bucketize_cpu(const at::Tensor & self, const at::Tensor & boundaries, bool out_int32=false, bool right=false);
|
| 20 |
+
TORCH_API at::Tensor & bucketize_out_cpu(const at::Tensor & self, const at::Tensor & boundaries, bool out_int32, bool right, at::Tensor & out);
|
| 21 |
+
TORCH_API at::Tensor bucketize_cuda(const at::Tensor & self, const at::Tensor & boundaries, bool out_int32=false, bool right=false);
|
| 22 |
+
TORCH_API at::Tensor & bucketize_out_cuda(const at::Tensor & self, const at::Tensor & boundaries, bool out_int32, bool right, at::Tensor & out);
|
| 23 |
+
TORCH_API at::Tensor & bucketize_Scalar_out(const at::Scalar & self, const at::Tensor & boundaries, bool out_int32, bool right, at::Tensor & out);
|
| 24 |
+
TORCH_API at::Tensor bucketize_cpu(const at::Scalar & self, const at::Tensor & boundaries, bool out_int32=false, bool right=false);
|
| 25 |
+
TORCH_API at::Tensor bucketize_cuda(const at::Scalar & self, const at::Tensor & boundaries, bool out_int32=false, bool right=false);
|
| 26 |
+
} // namespace native
|
| 27 |
+
} // namespace at
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/cudnn_grid_sampler.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <optional>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/cudnn_grid_sampler_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::cudnn_grid_sampler(Tensor self, Tensor grid) -> Tensor output
|
| 26 |
+
inline at::Tensor cudnn_grid_sampler(const at::Tensor & self, const at::Tensor & grid) {
|
| 27 |
+
return at::_ops::cudnn_grid_sampler::call(self, grid);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
// aten::cudnn_grid_sampler.out(Tensor self, Tensor grid, *, Tensor(a!) out) -> Tensor(a!)
|
| 31 |
+
inline at::Tensor & cudnn_grid_sampler_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & grid) {
|
| 32 |
+
return at::_ops::cudnn_grid_sampler_out::call(self, grid, out);
|
| 33 |
+
}
|
| 34 |
+
// aten::cudnn_grid_sampler.out(Tensor self, Tensor grid, *, Tensor(a!) out) -> Tensor(a!)
|
| 35 |
+
inline at::Tensor & cudnn_grid_sampler_outf(const at::Tensor & self, const at::Tensor & grid, at::Tensor & out) {
|
| 36 |
+
return at::_ops::cudnn_grid_sampler_out::call(self, grid, out);
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
}
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/div_compositeexplicitautogradnonfunctional_dispatch.h
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeexplicitautogradnonfunctional {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor div(const at::Tensor & self, const at::Tensor & other);
|
| 21 |
+
TORCH_API at::Tensor & div_(at::Tensor & self, const at::Tensor & other);
|
| 22 |
+
TORCH_API at::Tensor div(const at::Tensor & self, const at::Tensor & other, ::std::optional<c10::string_view> rounding_mode);
|
| 23 |
+
TORCH_API at::Tensor & div_(at::Tensor & self, const at::Tensor & other, ::std::optional<c10::string_view> rounding_mode);
|
| 24 |
+
|
| 25 |
+
} // namespace compositeexplicitautogradnonfunctional
|
| 26 |
+
} // namespace at
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/eq.h
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <optional>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/eq_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::eq.Scalar_out(Tensor self, Scalar other, *, Tensor(a!) out) -> Tensor(a!)
|
| 26 |
+
inline at::Tensor & eq_out(at::Tensor & out, const at::Tensor & self, const at::Scalar & other) {
|
| 27 |
+
return at::_ops::eq_Scalar_out::call(self, other, out);
|
| 28 |
+
}
|
| 29 |
+
// aten::eq.Scalar_out(Tensor self, Scalar other, *, Tensor(a!) out) -> Tensor(a!)
|
| 30 |
+
inline at::Tensor & eq_outf(const at::Tensor & self, const at::Scalar & other, at::Tensor & out) {
|
| 31 |
+
return at::_ops::eq_Scalar_out::call(self, other, out);
|
| 32 |
+
}
|
| 33 |
+
|
| 34 |
+
// aten::eq.Scalar(Tensor self, Scalar other) -> Tensor
|
| 35 |
+
inline at::Tensor eq(const at::Tensor & self, const at::Scalar & other) {
|
| 36 |
+
return at::_ops::eq_Scalar::call(self, other);
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
// aten::eq.Tensor_out(Tensor self, Tensor other, *, Tensor(a!) out) -> Tensor(a!)
|
| 40 |
+
inline at::Tensor & eq_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & other) {
|
| 41 |
+
return at::_ops::eq_Tensor_out::call(self, other, out);
|
| 42 |
+
}
|
| 43 |
+
// aten::eq.Tensor_out(Tensor self, Tensor other, *, Tensor(a!) out) -> Tensor(a!)
|
| 44 |
+
inline at::Tensor & eq_outf(const at::Tensor & self, const at::Tensor & other, at::Tensor & out) {
|
| 45 |
+
return at::_ops::eq_Tensor_out::call(self, other, out);
|
| 46 |
+
}
|
| 47 |
+
|
| 48 |
+
// aten::eq.Tensor(Tensor self, Tensor other) -> Tensor
|
| 49 |
+
inline at::Tensor eq(const at::Tensor & self, const at::Tensor & other) {
|
| 50 |
+
return at::_ops::eq_Tensor::call(self, other);
|
| 51 |
+
}
|
| 52 |
+
|
| 53 |
+
}
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/equal_native.h
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <optional>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API bool cpu_equal(const at::Tensor & self, const at::Tensor & other);
|
| 20 |
+
TORCH_API bool cuda_equal(const at::Tensor & self, const at::Tensor & other);
|
| 21 |
+
TORCH_API bool equal_quantized_cpu(const at::Tensor & self, const at::Tensor & other);
|
| 22 |
+
} // namespace native
|
| 23 |
+
} // namespace at
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/eye_cuda_dispatch.h
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace cuda {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor & eye_out(at::Tensor & out, int64_t n);
|
| 21 |
+
TORCH_API at::Tensor & eye_outf(int64_t n, at::Tensor & out);
|
| 22 |
+
TORCH_API at::Tensor & eye_symint_out(at::Tensor & out, c10::SymInt n);
|
| 23 |
+
TORCH_API at::Tensor & eye_symint_outf(c10::SymInt n, at::Tensor & out);
|
| 24 |
+
TORCH_API at::Tensor & eye_out(at::Tensor & out, int64_t n, int64_t m);
|
| 25 |
+
TORCH_API at::Tensor & eye_outf(int64_t n, int64_t m, at::Tensor & out);
|
| 26 |
+
TORCH_API at::Tensor & eye_symint_out(at::Tensor & out, c10::SymInt n, c10::SymInt m);
|
| 27 |
+
TORCH_API at::Tensor & eye_symint_outf(c10::SymInt n, c10::SymInt m, at::Tensor & out);
|
| 28 |
+
|
| 29 |
+
} // namespace cuda
|
| 30 |
+
} // namespace at
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/fake_quantize_per_tensor_affine_cachemask_cuda_dispatch.h
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace cuda {
|
| 19 |
+
|
| 20 |
+
TORCH_API ::std::tuple<at::Tensor,at::Tensor> fake_quantize_per_tensor_affine_cachemask(const at::Tensor & self, double scale, int64_t zero_point, int64_t quant_min, int64_t quant_max);
|
| 21 |
+
|
| 22 |
+
} // namespace cuda
|
| 23 |
+
} // namespace at
|
infer_4_47_1/lib/python3.10/site-packages/torch/include/ATen/ops/fft_fftfreq_compositeexplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeexplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor fft_fftfreq(int64_t n, double d=1.0, at::TensorOptions options={});
|
| 21 |
+
TORCH_API at::Tensor fft_fftfreq(int64_t n, double d, ::std::optional<at::ScalarType> dtype, ::std::optional<at::Layout> layout, ::std::optional<at::Device> device, ::std::optional<bool> pin_memory);
|
| 22 |
+
TORCH_API at::Tensor & fft_fftfreq_out(at::Tensor & out, int64_t n, double d=1.0);
|
| 23 |
+
TORCH_API at::Tensor & fft_fftfreq_outf(int64_t n, double d, at::Tensor & out);
|
| 24 |
+
|
| 25 |
+
} // namespace compositeexplicitautograd
|
| 26 |
+
} // namespace at
|