David Pomerenke committed on
Commit 6485aff · 1 Parent(s): b7bd747

Add dev dependencies to requirements.txt

Files changed (3)
  1. pyproject.toml +2 -3
  2. requirements.txt +248 -11
  3. uv.lock +0 -2
pyproject.toml CHANGED
@@ -11,11 +11,10 @@ dependencies = [
     "numpy>=2.1.2",
     "joblib>=1.4.2",
     "language-data>=1.3.0",
-    "tqdm>=4.66.6",
 ]

-[tool.uv]
-dev-dependencies = [
+[project.optional-dependencies]
+dev = [
     "aiolimiter>=1.1.0",
     "bert-score>=0.3.13",
     "elevenlabs>=1.53.0",
requirements.txt CHANGED
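This file is now compiled from the `dev` extra declared above, so the development packages are pinned alongside the runtime ones. A minimal sketch of the two usual ways to consume such an extra; the first command is the one recorded in the file header, the second is standard pip extras syntax (not taken from this commit):

# Regenerate the pinned requirements, including the dev extra
uv pip compile pyproject.toml -o requirements.txt --extra dev

# Or install the project together with its dev extra into the active environment
pip install -e ".[dev]"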
@@ -1,57 +1,294 @@
 # This file was autogenerated by uv via the following command:
-#    uv pip compile pyproject.toml -o requirements.txt
+#    uv pip compile pyproject.toml -o requirements.txt --extra dev
+aiohappyeyeballs==2.6.1
+    # via aiohttp
+aiohttp==3.11.18
+    # via
+    #   datasets
+    #   fsspec
+aiolimiter==1.2.1
+    # via languagebench (pyproject.toml)
+aiosignal==1.3.2
+    # via aiohttp
 annotated-types==0.7.0
     # via pydantic
 anyio==4.8.0
-    # via starlette
+    # via
+    #   httpx
+    #   openai
+    #   starlette
+async-timeout==5.0.1
+    # via aiohttp
+attrs==25.3.0
+    # via aiohttp
+bert-score==0.3.13
+    # via languagebench (pyproject.toml)
+certifi==2025.4.26
+    # via
+    #   httpcore
+    #   httpx
+    #   requests
+charset-normalizer==3.4.2
+    # via requests
 click==8.1.8
-    # via uvicorn
+    # via
+    #   jiwer
+    #   uvicorn
+colorama==0.4.6
+    # via sacrebleu
+contourpy==1.3.2
+    # via matplotlib
+cycler==0.12.1
+    # via matplotlib
+datasets==3.5.1
+    # via evaluate
+dill==0.3.8
+    # via
+    #   datasets
+    #   evaluate
+    #   multiprocess
+distro==1.9.0
+    # via openai
+elevenlabs==1.57.0
+    # via languagebench (pyproject.toml)
+evaluate==0.4.0
+    # via languagebench (pyproject.toml)
 exceptiongroup==1.2.2
     # via anyio
 fastapi==0.115.8
     # via languagebench (pyproject.toml)
+filelock==3.18.0
+    # via
+    #   datasets
+    #   huggingface-hub
+    #   torch
+    #   transformers
+fonttools==4.57.0
+    # via matplotlib
+frozenlist==1.6.0
+    # via
+    #   aiohttp
+    #   aiosignal
+fsspec==2025.3.0
+    # via
+    #   datasets
+    #   evaluate
+    #   huggingface-hub
+    #   torch
 h11==0.14.0
-    # via uvicorn
+    # via
+    #   httpcore
+    #   uvicorn
+httpcore==1.0.8
+    # via httpx
+httpx==0.28.1
+    # via
+    #   elevenlabs
+    #   openai
+huggingface-hub==0.30.2
+    # via
+    #   languagebench (pyproject.toml)
+    #   datasets
+    #   evaluate
+    #   tokenizers
+    #   transformers
 idna==3.10
-    # via anyio
+    # via
+    #   anyio
+    #   httpx
+    #   requests
+    #   yarl
+jinja2==3.1.6
+    # via torch
+jiter==0.9.0
+    # via openai
+jiwer==3.1.0
+    # via languagebench (pyproject.toml)
 joblib==1.5.0
     # via languagebench (pyproject.toml)
-language-data==1.3.0
+kiwisolver==1.4.8
+    # via matplotlib
+langcodes==3.5.0
     # via languagebench (pyproject.toml)
+language-data==1.3.0
+    # via
+    #   languagebench (pyproject.toml)
+    #   langcodes
+lxml==5.4.0
+    # via sacrebleu
 marisa-trie==1.2.1
     # via language-data
+markdown-it-py==3.0.0
+    # via rich
+markupsafe==3.0.2
+    # via jinja2
+matplotlib==3.10.1
+    # via bert-score
+mdurl==0.1.2
+    # via markdown-it-py
+mpmath==1.3.0
+    # via sympy
+multidict==6.4.3
+    # via
+    #   aiohttp
+    #   yarl
+multiprocess==0.70.16
+    # via
+    #   datasets
+    #   evaluate
+networkx==3.4.2
+    # via torch
 numpy==2.2.3
     # via
     #   languagebench (pyproject.toml)
+    #   bert-score
+    #   contourpy
+    #   datasets
+    #   evaluate
+    #   matplotlib
     #   pandas
+    #   sacrebleu
+    #   transformers
+openai==1.77.0
+    # via languagebench (pyproject.toml)
+packaging==25.0
+    # via
+    #   bert-score
+    #   datasets
+    #   evaluate
+    #   huggingface-hub
+    #   matplotlib
+    #   transformers
 pandas==2.2.3
+    # via
+    #   languagebench (pyproject.toml)
+    #   bert-score
+    #   datasets
+    #   evaluate
+pillow==11.2.1
+    # via matplotlib
+portalocker==3.1.1
+    # via sacrebleu
+propcache==0.3.1
+    # via
+    #   aiohttp
+    #   yarl
+protobuf==6.30.2
     # via languagebench (pyproject.toml)
+pyarrow==20.0.0
+    # via datasets
 pydantic==2.10.6
-    # via fastapi
+    # via
+    #   elevenlabs
+    #   fastapi
+    #   openai
 pydantic-core==2.27.2
-    # via pydantic
+    # via
+    #   elevenlabs
+    #   pydantic
+pygments==2.19.1
+    # via rich
+pyparsing==3.2.3
+    # via matplotlib
 python-dateutil==2.9.0.post0
-    # via pandas
+    # via
+    #   matplotlib
+    #   pandas
+python-dotenv==1.1.0
+    # via languagebench (pyproject.toml)
 pytz==2025.1
     # via pandas
+pyyaml==6.0.2
+    # via
+    #   datasets
+    #   huggingface-hub
+    #   transformers
+rapidfuzz==3.13.0
+    # via jiwer
+regex==2024.11.6
+    # via
+    #   sacrebleu
+    #   tiktoken
+    #   transformers
+requests==2.32.3
+    # via
+    #   bert-score
+    #   datasets
+    #   elevenlabs
+    #   evaluate
+    #   huggingface-hub
+    #   responses
+    #   tiktoken
+    #   transformers
+responses==0.18.0
+    # via evaluate
+rich==14.0.0
+    # via languagebench (pyproject.toml)
+sacrebleu==2.5.1
+    # via languagebench (pyproject.toml)
+safetensors==0.5.3
+    # via transformers
+sentencepiece==0.2.0
+    # via languagebench (pyproject.toml)
 setuptools==75.8.2
     # via marisa-trie
 six==1.17.0
     # via python-dateutil
 sniffio==1.3.1
-    # via anyio
+    # via
+    #   anyio
+    #   openai
 starlette==0.45.3
     # via fastapi
-tqdm==4.67.1
+sympy==1.14.0
+    # via torch
+tabulate==0.9.0
+    # via sacrebleu
+tiktoken==0.9.0
     # via languagebench (pyproject.toml)
+tokenizers==0.21.1
+    # via transformers
+torch==2.7.0
+    # via bert-score
+tqdm==4.67.1
+    # via
+    #   languagebench (pyproject.toml)
+    #   bert-score
+    #   datasets
+    #   evaluate
+    #   huggingface-hub
+    #   openai
+    #   transformers
+transformers==4.51.3
+    # via
+    #   languagebench (pyproject.toml)
+    #   bert-score
 typing-extensions==4.12.2
     # via
     #   anyio
+    #   elevenlabs
     #   fastapi
+    #   huggingface-hub
+    #   multidict
+    #   openai
     #   pydantic
     #   pydantic-core
+    #   rich
+    #   torch
     #   uvicorn
 tzdata==2025.1
     # via pandas
+urllib3==2.4.0
+    # via
+    #   requests
+    #   responses
 uvicorn==0.34.0
     # via languagebench (pyproject.toml)
+websockets==15.0.1
+    # via elevenlabs
+xxhash==3.5.0
+    # via
+    #   datasets
+    #   evaluate
+yarl==1.20.0
+    # via aiohttp
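Because the regenerated file pins both runtime and dev packages, reproducing the environment should be a matter of installing it into a fresh virtual environment; a sketch with an illustrative venv path:

uv venv .venv && source .venv/bin/activate
uv pip install -r requirements.txt    # plain `pip install -r requirements.txt` works as well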
uv.lock CHANGED
@@ -849,7 +849,6 @@ dependencies = [
     { name = "language-data" },
     { name = "numpy" },
     { name = "pandas" },
-    { name = "tqdm" },
     { name = "uvicorn" },
 ]

@@ -882,7 +881,6 @@ requires-dist = [
     { name = "language-data", specifier = ">=1.3.0" },
     { name = "numpy", specifier = ">=2.1.2" },
     { name = "pandas", specifier = ">=2.2.3" },
-    { name = "tqdm" },
     { name = "uvicorn", specifier = ">=0.34.0" },
 ]

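uv.lock drops tqdm from the project's direct runtime dependencies to match pyproject.toml; tqdm itself remains pinned in requirements.txt, since bert-score, datasets, huggingface-hub and other dev packages still require it. The usual way to refresh the lockfile and the environment after an edit like this, assuming a standard uv setup, is roughly:

uv lock               # re-resolve and rewrite uv.lock
uv sync --extra dev   # install the project with the dev extra from the lockfile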