Commit
·
cd8454d
1
Parent(s):
8c8537f
first push of codes and models for g2p, t2u, tokenizer and detokenizer
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +14 -0
- .gitignore +138 -0
- 00004557-00000030.wav +3 -0
- 004892.wav +3 -0
- LICENSE +202 -0
- README.md +45 -3
- README_CN.md +48 -0
- Third_Party_Open_Source_Software_Notice +1289 -0
- evaluation/README.md +9 -0
- evaluation/eval_detok_en.py +269 -0
- evaluation/eval_detok_zh.py +144 -0
- evaluation/eval_sim.py +188 -0
- evaluation/patch_unispeech.py +131 -0
- evaluation/patch_utils.py +122 -0
- evaluation/requirements_sim.txt +7 -0
- evaluation/requirements_wer.txt +12 -0
- figs/CADiT.jpg +3 -0
- figs/F5-streaming.jpg +3 -0
- figs/TTS.jpg +3 -0
- figs/eval1.jpg +3 -0
- figs/eval2.jpg +3 -0
- figs/eval3.jpg +3 -0
- figs/reconstruction.jpg +3 -0
- figs/tokenizer.jpg +3 -0
- install_requirements.sh +5 -0
- reconstuction_example.py +178 -0
- requirements_npu.txt +18 -0
- semantic_detokenizer/__init__.py +0 -0
- semantic_detokenizer/chunk_infer.py +478 -0
- semantic_detokenizer/ckpt/model.pt +3 -0
- semantic_detokenizer/ckpt/model.pt.md5 +1 -0
- semantic_detokenizer/ckpt/model.yaml +51 -0
- semantic_detokenizer/ckpt/vocab_4096.txt +4096 -0
- semantic_detokenizer/f5tts_npu_patch.py +106 -0
- semantic_detokenizer/model/__init__.py +0 -0
- semantic_detokenizer/model/cadit.py +263 -0
- semantic_detokenizer/model/modules.py +223 -0
- semantic_detokenizer/patch_utils.py +122 -0
- semantic_detokenizer/requirements.txt +13 -0
- semantic_detokenizer/utils_infer.py +298 -0
- semantic_tokenizer/__init__.py +0 -0
- semantic_tokenizer/f40ms/README.md +7 -0
- semantic_tokenizer/f40ms/__init__.py +0 -0
- semantic_tokenizer/f40ms/ckpt/model.pt +3 -0
- semantic_tokenizer/f40ms/ckpt/model.pt.md5 +1 -0
- semantic_tokenizer/f40ms/config/dict.km.txt +1000 -0
- semantic_tokenizer/f40ms/config/hubert_config.yaml +344 -0
- semantic_tokenizer/f40ms/fairseq_npu_patch.py +34 -0
- semantic_tokenizer/f40ms/infer_for_eval.py +140 -0
- semantic_tokenizer/f40ms/models/__init__.py +0 -0
.gitattributes
CHANGED
@@ -33,3 +33,17 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
36 |
+
semantic_detokenizer/ckpt/model.pt filter=lfs diff=lfs merge=lfs -text
|
37 |
+
semantic_tokenizer/f40ms/ckpt/model.pt filter=lfs diff=lfs merge=lfs -text
|
38 |
+
text2token/ckpt/40ms.checkpoint15.pt filter=lfs diff=lfs merge=lfs -text
|
39 |
+
thirdparty/G2P/text/ja_userdic/userdict.csv filter=lfs diff=lfs merge=lfs -text
|
40 |
+
figs/CADiT.jpg filter=lfs diff=lfs merge=lfs -text
|
41 |
+
figs/F5-streaming.jpg filter=lfs diff=lfs merge=lfs -text
|
42 |
+
figs/TTS.jpg filter=lfs diff=lfs merge=lfs -text
|
43 |
+
figs/eval1.jpg filter=lfs diff=lfs merge=lfs -text
|
44 |
+
figs/eval2.jpg filter=lfs diff=lfs merge=lfs -text
|
45 |
+
figs/eval3.jpg filter=lfs diff=lfs merge=lfs -text
|
46 |
+
figs/reconstruction.jpg filter=lfs diff=lfs merge=lfs -text
|
47 |
+
figs/tokenizer.jpg filter=lfs diff=lfs merge=lfs -text
|
48 |
+
004892.wav filter=lfs diff=lfs merge=lfs -text
|
49 |
+
00004557-00000030.wav filter=lfs diff=lfs merge=lfs -text
|
.gitignore
ADDED
@@ -0,0 +1,138 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Byte-compiled / optimized / DLL files
|
2 |
+
__pycache__/
|
3 |
+
*.py[cod]
|
4 |
+
*$py.class
|
5 |
+
|
6 |
+
# C extensions
|
7 |
+
*.so
|
8 |
+
|
9 |
+
# Distribution / packaging
|
10 |
+
.Python
|
11 |
+
build/
|
12 |
+
develop-eggs/
|
13 |
+
dist/
|
14 |
+
downloads/
|
15 |
+
eggs/
|
16 |
+
.eggs/
|
17 |
+
lib/
|
18 |
+
lib64/
|
19 |
+
parts/
|
20 |
+
sdist/
|
21 |
+
var/
|
22 |
+
wheels/
|
23 |
+
share/python-wheels/
|
24 |
+
*.egg-info/
|
25 |
+
.installed.cfg
|
26 |
+
*.egg
|
27 |
+
MANIFEST
|
28 |
+
|
29 |
+
# PyInstaller
|
30 |
+
# Usually these files are written by a python script from a template
|
31 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
32 |
+
*.manifest
|
33 |
+
*.spec
|
34 |
+
|
35 |
+
# Installer logs
|
36 |
+
pip-log.txt
|
37 |
+
pip-delete-this-directory.txt
|
38 |
+
|
39 |
+
# Unit test / coverage reports
|
40 |
+
htmlcov/
|
41 |
+
.tox/
|
42 |
+
.nox/
|
43 |
+
.coverage
|
44 |
+
.coverage.*
|
45 |
+
.cache
|
46 |
+
nosetests.xml
|
47 |
+
coverage.xml
|
48 |
+
*.cover
|
49 |
+
*.py,cover
|
50 |
+
.hypothesis/
|
51 |
+
.pytest_cache/
|
52 |
+
cover/
|
53 |
+
|
54 |
+
# Translations
|
55 |
+
*.mo
|
56 |
+
*.pot
|
57 |
+
|
58 |
+
# Django stuff:
|
59 |
+
*.log
|
60 |
+
local_settings.py
|
61 |
+
db.sqlite3
|
62 |
+
db.sqlite3-journal
|
63 |
+
|
64 |
+
# Flask stuff:
|
65 |
+
instance/
|
66 |
+
.webassets-cache
|
67 |
+
|
68 |
+
# Scrapy stuff:
|
69 |
+
.scrapy
|
70 |
+
|
71 |
+
# Sphinx documentation
|
72 |
+
docs/_build/
|
73 |
+
|
74 |
+
# PyBuilder
|
75 |
+
.pybuilder/
|
76 |
+
target/
|
77 |
+
|
78 |
+
# Jupyter Notebook
|
79 |
+
.ipynb_checkpoints
|
80 |
+
|
81 |
+
# IPython
|
82 |
+
profile_default/
|
83 |
+
ipython_config.py
|
84 |
+
|
85 |
+
# pyenv
|
86 |
+
# For a library or package, you might want to ignore these files since the code is
|
87 |
+
# intended to run in multiple environments; otherwise, check them in:
|
88 |
+
# .python-version
|
89 |
+
|
90 |
+
# pipenv
|
91 |
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
92 |
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
93 |
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
94 |
+
# install all needed dependencies.
|
95 |
+
#Pipfile.lock
|
96 |
+
|
97 |
+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
|
98 |
+
__pypackages__/
|
99 |
+
|
100 |
+
# Celery stuff
|
101 |
+
celerybeat-schedule
|
102 |
+
celerybeat.pid
|
103 |
+
|
104 |
+
# SageMath parsed files
|
105 |
+
*.sage.py
|
106 |
+
|
107 |
+
# Environments
|
108 |
+
.env
|
109 |
+
.venv
|
110 |
+
env/
|
111 |
+
venv/
|
112 |
+
ENV/
|
113 |
+
env.bak/
|
114 |
+
venv.bak/
|
115 |
+
|
116 |
+
# Spyder project settings
|
117 |
+
.spyderproject
|
118 |
+
.spyproject
|
119 |
+
|
120 |
+
# Rope project settings
|
121 |
+
.ropeproject
|
122 |
+
|
123 |
+
# mkdocs documentation
|
124 |
+
/site
|
125 |
+
|
126 |
+
# mypy
|
127 |
+
.mypy_cache/
|
128 |
+
.dmypy.json
|
129 |
+
dmypy.json
|
130 |
+
|
131 |
+
# Pyre type checker
|
132 |
+
.pyre/
|
133 |
+
|
134 |
+
# pytype static type analyzer
|
135 |
+
.pytype/
|
136 |
+
|
137 |
+
# Cython debug symbols
|
138 |
+
cython_debug/
|
00004557-00000030.wav
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:8a3b3747139278b3e4318d24e2e844cdbe534602d95fa5430786721d14480a9b
|
3 |
+
size 249388
|
004892.wav
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:255a96ce54c0b502b2aa785a4b1fdf0729d4c8975236fc4a1ba325c09637bf79
|
3 |
+
size 297358
|
LICENSE
ADDED
@@ -0,0 +1,202 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
Apache License
|
3 |
+
Version 2.0, January 2004
|
4 |
+
http://www.apache.org/licenses/
|
5 |
+
|
6 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
7 |
+
|
8 |
+
1. Definitions.
|
9 |
+
|
10 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
11 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
12 |
+
|
13 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
14 |
+
the copyright owner that is granting the License.
|
15 |
+
|
16 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
17 |
+
other entities that control, are controlled by, or are under common
|
18 |
+
control with that entity. For the purposes of this definition,
|
19 |
+
"control" means (i) the power, direct or indirect, to cause the
|
20 |
+
direction or management of such entity, whether by contract or
|
21 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
22 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
23 |
+
|
24 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
25 |
+
exercising permissions granted by this License.
|
26 |
+
|
27 |
+
"Source" form shall mean the preferred form for making modifications,
|
28 |
+
including but not limited to software source code, documentation
|
29 |
+
source, and configuration files.
|
30 |
+
|
31 |
+
"Object" form shall mean any form resulting from mechanical
|
32 |
+
transformation or translation of a Source form, including but
|
33 |
+
not limited to compiled object code, generated documentation,
|
34 |
+
and conversions to other media types.
|
35 |
+
|
36 |
+
"Work" shall mean the work of authorship, whether in Source or
|
37 |
+
Object form, made available under the License, as indicated by a
|
38 |
+
copyright notice that is included in or attached to the work
|
39 |
+
(an example is provided in the Appendix below).
|
40 |
+
|
41 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
42 |
+
form, that is based on (or derived from) the Work and for which the
|
43 |
+
editorial revisions, annotations, elaborations, or other modifications
|
44 |
+
represent, as a whole, an original work of authorship. For the purposes
|
45 |
+
of this License, Derivative Works shall not include works that remain
|
46 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
47 |
+
the Work and Derivative Works thereof.
|
48 |
+
|
49 |
+
"Contribution" shall mean any work of authorship, including
|
50 |
+
the original version of the Work and any modifications or additions
|
51 |
+
to that Work or Derivative Works thereof, that is intentionally
|
52 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
53 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
54 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
55 |
+
means any form of electronic, verbal, or written communication sent
|
56 |
+
to the Licensor or its representatives, including but not limited to
|
57 |
+
communication on electronic mailing lists, source code control systems,
|
58 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
59 |
+
Licensor for the purpose of discussing and improving the Work, but
|
60 |
+
excluding communication that is conspicuously marked or otherwise
|
61 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
62 |
+
|
63 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
64 |
+
on behalf of whom a Contribution has been received by Licensor and
|
65 |
+
subsequently incorporated within the Work.
|
66 |
+
|
67 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
68 |
+
this License, each Contributor hereby grants to You a perpetual,
|
69 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
70 |
+
copyright license to reproduce, prepare Derivative Works of,
|
71 |
+
publicly display, publicly perform, sublicense, and distribute the
|
72 |
+
Work and such Derivative Works in Source or Object form.
|
73 |
+
|
74 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
75 |
+
this License, each Contributor hereby grants to You a perpetual,
|
76 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
77 |
+
(except as stated in this section) patent license to make, have made,
|
78 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
79 |
+
where such license applies only to those patent claims licensable
|
80 |
+
by such Contributor that are necessarily infringed by their
|
81 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
82 |
+
with the Work to which such Contribution(s) was submitted. If You
|
83 |
+
institute patent litigation against any entity (including a
|
84 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
85 |
+
or a Contribution incorporated within the Work constitutes direct
|
86 |
+
or contributory patent infringement, then any patent licenses
|
87 |
+
granted to You under this License for that Work shall terminate
|
88 |
+
as of the date such litigation is filed.
|
89 |
+
|
90 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
91 |
+
Work or Derivative Works thereof in any medium, with or without
|
92 |
+
modifications, and in Source or Object form, provided that You
|
93 |
+
meet the following conditions:
|
94 |
+
|
95 |
+
(a) You must give any other recipients of the Work or
|
96 |
+
Derivative Works a copy of this License; and
|
97 |
+
|
98 |
+
(b) You must cause any modified files to carry prominent notices
|
99 |
+
stating that You changed the files; and
|
100 |
+
|
101 |
+
(c) You must retain, in the Source form of any Derivative Works
|
102 |
+
that You distribute, all copyright, patent, trademark, and
|
103 |
+
attribution notices from the Source form of the Work,
|
104 |
+
excluding those notices that do not pertain to any part of
|
105 |
+
the Derivative Works; and
|
106 |
+
|
107 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
108 |
+
distribution, then any Derivative Works that You distribute must
|
109 |
+
include a readable copy of the attribution notices contained
|
110 |
+
within such NOTICE file, excluding those notices that do not
|
111 |
+
pertain to any part of the Derivative Works, in at least one
|
112 |
+
of the following places: within a NOTICE text file distributed
|
113 |
+
as part of the Derivative Works; within the Source form or
|
114 |
+
documentation, if provided along with the Derivative Works; or,
|
115 |
+
within a display generated by the Derivative Works, if and
|
116 |
+
wherever such third-party notices normally appear. The contents
|
117 |
+
of the NOTICE file are for informational purposes only and
|
118 |
+
do not modify the License. You may add Your own attribution
|
119 |
+
notices within Derivative Works that You distribute, alongside
|
120 |
+
or as an addendum to the NOTICE text from the Work, provided
|
121 |
+
that such additional attribution notices cannot be construed
|
122 |
+
as modifying the License.
|
123 |
+
|
124 |
+
You may add Your own copyright statement to Your modifications and
|
125 |
+
may provide additional or different license terms and conditions
|
126 |
+
for use, reproduction, or distribution of Your modifications, or
|
127 |
+
for any such Derivative Works as a whole, provided Your use,
|
128 |
+
reproduction, and distribution of the Work otherwise complies with
|
129 |
+
the conditions stated in this License.
|
130 |
+
|
131 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
132 |
+
any Contribution intentionally submitted for inclusion in the Work
|
133 |
+
by You to the Licensor shall be under the terms and conditions of
|
134 |
+
this License, without any additional terms or conditions.
|
135 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
136 |
+
the terms of any separate license agreement you may have executed
|
137 |
+
with Licensor regarding such Contributions.
|
138 |
+
|
139 |
+
6. Trademarks. This License does not grant permission to use the trade
|
140 |
+
names, trademarks, service marks, or product names of the Licensor,
|
141 |
+
except as required for reasonable and customary use in describing the
|
142 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
143 |
+
|
144 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
145 |
+
agreed to in writing, Licensor provides the Work (and each
|
146 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
147 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
148 |
+
implied, including, without limitation, any warranties or conditions
|
149 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
150 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
151 |
+
appropriateness of using or redistributing the Work and assume any
|
152 |
+
risks associated with Your exercise of permissions under this License.
|
153 |
+
|
154 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
155 |
+
whether in tort (including negligence), contract, or otherwise,
|
156 |
+
unless required by applicable law (such as deliberate and grossly
|
157 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
158 |
+
liable to You for damages, including any direct, indirect, special,
|
159 |
+
incidental, or consequential damages of any character arising as a
|
160 |
+
result of this License or out of the use or inability to use the
|
161 |
+
Work (including but not limited to damages for loss of goodwill,
|
162 |
+
work stoppage, computer failure or malfunction, or any and all
|
163 |
+
other commercial damages or losses), even if such Contributor
|
164 |
+
has been advised of the possibility of such damages.
|
165 |
+
|
166 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
167 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
168 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
169 |
+
or other liability obligations and/or rights consistent with this
|
170 |
+
License. However, in accepting such obligations, You may act only
|
171 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
172 |
+
of any other Contributor, and only if You agree to indemnify,
|
173 |
+
defend, and hold each Contributor harmless for any liability
|
174 |
+
incurred by, or claims asserted against, such Contributor by reason
|
175 |
+
of your accepting any such warranty or additional liability.
|
176 |
+
|
177 |
+
END OF TERMS AND CONDITIONS
|
178 |
+
|
179 |
+
APPENDIX: How to apply the Apache License to your work.
|
180 |
+
|
181 |
+
To apply the Apache License to your work, attach the following
|
182 |
+
boilerplate notice, with the fields enclosed by brackets "[]"
|
183 |
+
replaced with your own identifying information. (Don't include
|
184 |
+
the brackets!) The text should be enclosed in the appropriate
|
185 |
+
comment syntax for the file format. We also recommend that a
|
186 |
+
file or class name and description of purpose be included on the
|
187 |
+
same "printed page" as the copyright notice for easier
|
188 |
+
identification within third-party archives.
|
189 |
+
|
190 |
+
Copyright [yyyy] [name of copyright owner]
|
191 |
+
|
192 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
193 |
+
you may not use this file except in compliance with the License.
|
194 |
+
You may obtain a copy of the License at
|
195 |
+
|
196 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
197 |
+
|
198 |
+
Unless required by applicable law or agreed to in writing, software
|
199 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
200 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
201 |
+
See the License for the specific language governing permissions and
|
202 |
+
limitations under the License.
|
README.md
CHANGED
@@ -1,3 +1,45 @@
|
|
1 |
-
|
2 |
-
|
3 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Discrete Speech Tokenization Toolkit
|
2 |
+
## Release Notes:
|
3 |
+
V1.0
|
4 |
+
|
5 |
+
This release consists of the following models:
|
6 |
+
1. A speech semantic tokenizer (25Hz, codebook size=4096) for Chinese and English
|
7 |
+
2. A corresponding speech detokenizer for Chinese and English
|
8 |
+
3. A text2token model (T2U) that converts text to speech tokens.
|
9 |
+
|
10 |
+
These models achieve top-tier performance on TTS and speech reconstruction on the seed-tts-eval dataset:
|
11 |
+
<p align="center"><img src="figs/eval1.jpg" width="1200"></p>
|
12 |
+
<p align="center"><img src="figs/eval2.jpg" width="1200"></p>
|
13 |
+
|
14 |
+
We also evaluated the ASR performance of our semantic tokenizer using an LLM as the backbone. Our model achieves performance comparable to models that use continuous speech representations.
|
15 |
+
<p align="center"><img src="figs/eval3.jpg" width="1200"></p>
|
16 |
+
|
17 |
+
## Speech Semantic Tokenizer
|
18 |
+
The speech semantic tokenizer is trained with labelled data.
|
19 |
+
<p align="center"><img src="figs/tokenizer.jpg" width="800"></p>
|
20 |
+
|
21 |
+
|
22 |
+
## Speech Detokenizer
|
23 |
+
Our speech detokenizer is developed based on [F5-TTS](https://github.com/SWivid/F5-TTS) with two major updates added.
|
24 |
+
1. We adopt a DiT block with cross attention, similar to the detokenizer of [GLM-4-Voice](https://github.com/zai-org/GLM-4-Voice).
|
25 |
+
<p align="center"><img src="figs/CADiT.jpg" height="600"></p>
|
26 |
+
|
27 |
+
2. We introduced a chunk-wise streaming inference process that can generate speech of any length.
|
28 |
+
<p align="center"><img src="figs/F5-streaming.jpg" width="1200"></p>
|
29 |
+
|
30 |
+
## Text2Token(T2U)
|
31 |
+
Text2Token is a transformer machine translation model, trained on about 380k hours of speech-text pairs with [fairseq](https://github.com/facebookresearch/fairseq).
|
32 |
+
|
33 |
+
## TTS pipeline
|
34 |
+
As shown in tts_example.py, the 3 models could form a pipeline for TTS task.
|
35 |
+
<p align="center"><img src="figs/TTS.jpg" width="1200"></p>
|
36 |
+
|
37 |
+
## Non-parallel Speech Reconstruction Pipeline
|
38 |
+
As shown in reconstruction_example.py, the tokenizer and detokenizer could form a pipeline for speech reconstruction task.
|
39 |
+
<p align="center"><img src="figs/reconstruction.jpg" width="1200"></p>
|
40 |
+
|
41 |
+
# Core Developers:
|
42 |
+
[Daxin Tan]([email protected]), [Dehua Tao]([email protected]), [Yusen Sun]([email protected]) and [Xiao Chen]([email protected])
|
43 |
+
|
44 |
+
## Contributors:
|
45 |
+
[Hanlin Zhang]([email protected])
|
README_CN.md
ADDED
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Discrete Speech Tokenization Toolkit
|
2 |
+
## Release Notes:
|
3 |
+
V1.0
|
4 |
+
|
5 |
+
本次发布的工具包包括以下几个模型:
|
6 |
+
1. 一个语音tokenizer(25Hz, codebook size=4096)支持中英文
|
7 |
+
2. 一个对应的detokenizer支持中英文
|
8 |
+
3. 一个text2token模型(T2U)可将文本转换为语音token.
|
9 |
+
|
10 |
+
这些模型在seed-tts-eval数据集的TTS和语音重建任务上达到了一流的精度:
|
11 |
+
<p align="center"><img src="figs/eval1.jpg" width="1200"></p>
|
12 |
+
<p align="center"><img src="figs/eval2.jpg" width="1200"></p>
|
13 |
+
|
14 |
+
我们基于LLM测试了本语音tokenizer的ASR精度,我们的tokenizer达到了与采用连续语音表征的模型相近的水平.
|
15 |
+
<p align="center"><img src="figs/eval3.jpg" width="1200"></p>
|
16 |
+
|
17 |
+
## Speech Semantic Tokenizer
|
18 |
+
本tokenizer的采用了监督学习方法。训练中我们使用了从开源数据中采样的大约4000小时中英文语音文本对数据,语种比例为1:1.
|
19 |
+
<p align="center"><img src="figs/tokenizer.jpg" width="800"></p>
|
20 |
+
|
21 |
+
|
22 |
+
## Speech Detokenizer
|
23 |
+
本detokenizer是基于[F5-TTS](https://github.com/SWivid/F5-TTS)开发,但是对其进行了两项改进:
|
24 |
+
1. 采用了DiT with cross attention,类似于[GLM-4-Voice](https://github.com/zai-org/GLM-4-Voice)的detokenizer.
|
25 |
+
<p align="center"><img src="figs/CADiT.jpg" height="600"></p>
|
26 |
+
|
27 |
+
2. 开发了以chunk为单元的流式推理流程.
|
28 |
+
<p align="center"><img src="figs/F5-streaming.jpg" width="1200"></p>
|
29 |
+
|
30 |
+
本次发布的detokenizer使用了约6000小时的中英文数据包括Wenet4TTS(premium,standard),LibriTTS等等。
|
31 |
+
|
32 |
+
## Text2Token(T2U)
|
33 |
+
Text2Token是一个transformer机器翻译模型,用[fairseq](https://github.com/facebookresearch/fairseq)在约38万小时的语音文本对数据上训练得到。
|
34 |
+
|
35 |
+
|
36 |
+
## TTS pipeline
|
37 |
+
在tts_example.py中, 我们给出了一个实例,串联使用上述三个模型实现TTS的功能.
|
38 |
+
<p align="center"><img src="figs/TTS.jpg" width="1200"></p>
|
39 |
+
|
40 |
+
## Non-parallel Speech Reconstruction Pipeline
|
41 |
+
在reconstruction_example.py中, 有另外一个实例,串联使用tokenizer和detokenizer实现语音重建的功能.
|
42 |
+
<p align="center"><img src="figs/reconstruction.jpg" width="1200"></p>
|
43 |
+
|
44 |
+
# Core Developers:
|
45 |
+
[Daxin Tan]([email protected]), [Dehua Tao]([email protected]), [Yusen Sun]([email protected]) and [Xiao Chen]([email protected])
|
46 |
+
|
47 |
+
## Contributors:
|
48 |
+
[Hanlin Zhang]([email protected])
|
Third_Party_Open_Source_Software_Notice
ADDED
@@ -0,0 +1,1289 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
OPEN SOURCE SOFTWARE NOTICE FOR LIST
|
2 |
+
|
3 |
+
This document contains licenses and copyright notices for third-party software components used in the list project, which are legally required to be disclosed under their respective open source licenses.
|
4 |
+
|
5 |
+
The below components are provided under separate licenses. The applicable license terms for each component are provided in the corresponding license text below. These licenses are granted by the original copyright holders.
|
6 |
+
|
7 |
+
When using the open source software components contained in list, the applicable open source license(s) shall prevail over any other license terms that may be contained in this distribution, including but not limited to: Apache-2.0, BSD-3-Clause, MIT, NOASSERTION.
|
8 |
+
|
9 |
+
WARRANTY DISCLAIMER
|
10 |
+
|
11 |
+
THE OPEN SOURCE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
12 |
+
|
13 |
+
For full warranty details and limitations, refer to the individual license texts provided below.
|
14 |
+
|
15 |
+
================================================================================
|
16 |
+
|
17 |
+
[Software] fairseq 0.12.2
|
18 |
+
|
19 |
+
Copyright Notice(s):
|
20 |
+
• Copyright (c) Facebook, Inc. and its affiliates.
|
21 |
+
|
22 |
+
License: MIT License
|
23 |
+
|
24 |
+
Full License Text:
|
25 |
+
Copyright (c) Facebook, Inc. and its affiliates.
|
26 |
+
|
27 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
28 |
+
of this software and associated documentation files (the "Software"), to deal
|
29 |
+
in the Software without restriction, including without limitation the rights
|
30 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
31 |
+
copies of the Software, and to permit persons to whom the Software is
|
32 |
+
furnished to do so, subject to the following conditions:
|
33 |
+
|
34 |
+
The above copyright notice and this permission notice shall be included in all
|
35 |
+
copies or substantial portions of the Software.
|
36 |
+
|
37 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
38 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
39 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
40 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
41 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
42 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
43 |
+
SOFTWARE.
|
44 |
+
|
45 |
+
================================================================================
|
46 |
+
|
47 |
+
[Software] f5-tts 0.12.2
|
48 |
+
|
49 |
+
Copyright Notice(s):
|
50 |
+
• Copyright (c) 2024 Yushen CHEN
|
51 |
+
|
52 |
+
License: MIT License
|
53 |
+
|
54 |
+
Full License Text:
|
55 |
+
Copyright (c) 2024 Yushen CHEN
|
56 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
57 |
+
of this software and associated documentation files (the "Software"), to deal
|
58 |
+
in the Software without restriction, including without limitation the rights
|
59 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
60 |
+
copies of the Software, and to permit persons to whom the Software is
|
61 |
+
furnished to do so, subject to the following conditions:
|
62 |
+
|
63 |
+
The above copyright notice and this permission notice shall be included in all
|
64 |
+
copies or substantial portions of the Software.
|
65 |
+
|
66 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
67 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
68 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
69 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
70 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
71 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
72 |
+
SOFTWARE.
|
73 |
+
|
74 |
+
================================================================================
|
75 |
+
|
76 |
+
[Software] GPT-SoVITS
|
77 |
+
|
78 |
+
Copyright Notice(s):
|
79 |
+
• Copyright (c) 2024 RVC-Boss
|
80 |
+
|
81 |
+
License: MIT License
|
82 |
+
|
83 |
+
Full License Text:
|
84 |
+
Copyright (c) 2024 RVC-Boss
|
85 |
+
|
86 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
87 |
+
of this software and associated documentation files (the "Software"), to deal
|
88 |
+
in the Software without restriction, including without limitation the rights
|
89 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
90 |
+
copies of the Software, and to permit persons to whom the Software is
|
91 |
+
furnished to do so, subject to the following conditions:
|
92 |
+
|
93 |
+
The above copyright notice and this permission notice shall be included in all
|
94 |
+
copies or substantial portions of the Software.
|
95 |
+
|
96 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
97 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
98 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
99 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
100 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
101 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
102 |
+
SOFTWARE.
|
103 |
+
|
104 |
+
================================================================================
|
105 |
+
|
106 |
+
[Software] WeTextProcessing
|
107 |
+
|
108 |
+
Copyright Notice(s):
|
109 |
+
• Copyright (c) 2022 Zhendong Peng ([email protected])
|
110 |
+
|
111 |
+
License: Apache License V2.0
|
112 |
+
|
113 |
+
Full License Text:
|
114 |
+
Apache License
|
115 |
+
Version 2.0, January 2004
|
116 |
+
http://www.apache.org/licenses/
|
117 |
+
|
118 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
119 |
+
|
120 |
+
1. Definitions.
|
121 |
+
|
122 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
123 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
124 |
+
|
125 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
126 |
+
the copyright owner that is granting the License.
|
127 |
+
|
128 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
129 |
+
other entities that control, are controlled by, or are under common
|
130 |
+
control with that entity. For the purposes of this definition,
|
131 |
+
"control" means (i) the power, direct or indirect, to cause the
|
132 |
+
direction or management of such entity, whether by contract or
|
133 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
134 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
135 |
+
|
136 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
137 |
+
exercising permissions granted by this License.
|
138 |
+
|
139 |
+
"Source" form shall mean the preferred form for making modifications,
|
140 |
+
including but not limited to software source code, documentation
|
141 |
+
source, and configuration files.
|
142 |
+
|
143 |
+
"Object" form shall mean any form resulting from mechanical
|
144 |
+
transformation or translation of a Source form, including but
|
145 |
+
not limited to compiled object code, generated documentation,
|
146 |
+
and conversions to other media types.
|
147 |
+
|
148 |
+
"Work" shall mean the work of authorship, whether in Source or
|
149 |
+
Object form, made available under the License, as indicated by a
|
150 |
+
copyright notice that is included in or attached to the work
|
151 |
+
(an example is provided in the Appendix below).
|
152 |
+
|
153 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
154 |
+
form, that is based on (or derived from) the Work and for which the
|
155 |
+
editorial revisions, annotations, elaborations, or other modifications
|
156 |
+
represent, as a whole, an original work of authorship. For the purposes
|
157 |
+
of this License, Derivative Works shall not include works that remain
|
158 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
159 |
+
the Work and Derivative Works thereof.
|
160 |
+
|
161 |
+
"Contribution" shall mean any work of authorship, including
|
162 |
+
the original version of the Work and any modifications or additions
|
163 |
+
to that Work or Derivative Works thereof, that is intentionally
|
164 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
165 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
166 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
167 |
+
means any form of electronic, verbal, or written communication sent
|
168 |
+
to the Licensor or its representatives, including but not limited to
|
169 |
+
communication on electronic mailing lists, source code control systems,
|
170 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
171 |
+
Licensor for the purpose of discussing and improving the Work, but
|
172 |
+
excluding communication that is conspicuously marked or otherwise
|
173 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
174 |
+
|
175 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
176 |
+
on behalf of whom a Contribution has been received by Licensor and
|
177 |
+
subsequently incorporated within the Work.
|
178 |
+
|
179 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
180 |
+
this License, each Contributor hereby grants to You a perpetual,
|
181 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
182 |
+
copyright license to reproduce, prepare Derivative Works of,
|
183 |
+
publicly display, publicly perform, sublicense, and distribute the
|
184 |
+
Work and such Derivative Works in Source or Object form.
|
185 |
+
|
186 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
187 |
+
this License, each Contributor hereby grants to You a perpetual,
|
188 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
189 |
+
(except as stated in this section) patent license to make, have made,
|
190 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
191 |
+
where such license applies only to those patent claims licensable
|
192 |
+
by such Contributor that are necessarily infringed by their
|
193 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
194 |
+
with the Work to which such Contribution(s) was submitted. If You
|
195 |
+
institute patent litigation against any entity (including a
|
196 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
197 |
+
or a Contribution incorporated within the Work constitutes direct
|
198 |
+
or contributory patent infringement, then any patent licenses
|
199 |
+
granted to You under this License for that Work shall terminate
|
200 |
+
as of the date such litigation is filed.
|
201 |
+
|
202 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
203 |
+
Work or Derivative Works thereof in any medium, with or without
|
204 |
+
modifications, and in Source or Object form, provided that You
|
205 |
+
meet the following conditions:
|
206 |
+
|
207 |
+
(a) You must give any other recipients of the Work or
|
208 |
+
Derivative Works a copy of this License; and
|
209 |
+
|
210 |
+
(b) You must cause any modified files to carry prominent notices
|
211 |
+
stating that You changed the files; and
|
212 |
+
|
213 |
+
(c) You must retain, in the Source form of any Derivative Works
|
214 |
+
that You distribute, all copyright, patent, trademark, and
|
215 |
+
attribution notices from the Source form of the Work,
|
216 |
+
excluding those notices that do not pertain to any part of
|
217 |
+
the Derivative Works; and
|
218 |
+
|
219 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
220 |
+
distribution, then any Derivative Works that You distribute must
|
221 |
+
include a readable copy of the attribution notices contained
|
222 |
+
within such NOTICE file, excluding those notices that do not
|
223 |
+
pertain to any part of the Derivative Works, in at least one
|
224 |
+
of the following places: within a NOTICE text file distributed
|
225 |
+
as part of the Derivative Works; within the Source form or
|
226 |
+
documentation, if provided along with the Derivative Works; or,
|
227 |
+
within a display generated by the Derivative Works, if and
|
228 |
+
wherever such third-party notices normally appear. The contents
|
229 |
+
of the NOTICE file are for informational purposes only and
|
230 |
+
do not modify the License. You may add Your own attribution
|
231 |
+
notices within Derivative Works that You distribute, alongside
|
232 |
+
or as an addendum to the NOTICE text from the Work, provided
|
233 |
+
that such additional attribution notices cannot be construed
|
234 |
+
as modifying the License.
|
235 |
+
|
236 |
+
You may add Your own copyright statement to Your modifications and
|
237 |
+
may provide additional or different license terms and conditions
|
238 |
+
for use, reproduction, or distribution of Your modifications, or
|
239 |
+
for any such Derivative Works as a whole, provided Your use,
|
240 |
+
reproduction, and distribution of the Work otherwise complies with
|
241 |
+
the conditions stated in this License.
|
242 |
+
|
243 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
244 |
+
any Contribution intentionally submitted for inclusion in the Work
|
245 |
+
by You to the Licensor shall be under the terms and conditions of
|
246 |
+
this License, without any additional terms or conditions.
|
247 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
248 |
+
the terms of any separate license agreement you may have executed
|
249 |
+
with Licensor regarding such Contributions.
|
250 |
+
|
251 |
+
6. Trademarks. This License does not grant permission to use the trade
|
252 |
+
names, trademarks, service marks, or product names of the Licensor,
|
253 |
+
except as required for reasonable and customary use in describing the
|
254 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
255 |
+
|
256 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
257 |
+
agreed to in writing, Licensor provides the Work (and each
|
258 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
259 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
260 |
+
implied, including, without limitation, any warranties or conditions
|
261 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
262 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
263 |
+
appropriateness of using or redistributing the Work and assume any
|
264 |
+
risks associated with Your exercise of permissions under this License.
|
265 |
+
|
266 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
267 |
+
whether in tort (including negligence), contract, or otherwise,
|
268 |
+
unless required by applicable law (such as deliberate and grossly
|
269 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
270 |
+
liable to You for damages, including any direct, indirect, special,
|
271 |
+
incidental, or consequential damages of any character arising as a
|
272 |
+
result of this License or out of the use or inability to use the
|
273 |
+
Work (including but not limited to damages for loss of goodwill,
|
274 |
+
work stoppage, computer failure or malfunction, or any and all
|
275 |
+
other commercial damages or losses), even if such Contributor
|
276 |
+
has been advised of the possibility of such damages.
|
277 |
+
|
278 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
279 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
280 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
281 |
+
or other liability obligations and/or rights consistent with this
|
282 |
+
License. However, in accepting such obligations, You may act only
|
283 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
284 |
+
of any other Contributor, and only if You agree to indemnify,
|
285 |
+
defend, and hold each Contributor harmless for any liability
|
286 |
+
incurred by, or claims asserted against, such Contributor by reason
|
287 |
+
of your accepting any such warranty or additional liability.
|
288 |
+
|
289 |
+
END OF TERMS AND CONDITIONS
|
290 |
+
|
291 |
+
================================================================================
|
292 |
+
|
293 |
+
[Software] icefall 1.0
|
294 |
+
|
295 |
+
Copyright Notice(s):
|
296 |
+
• Copyright (c) 2021-2024 Xiaomi Corp.
|
297 |
+
|
298 |
+
License: Apache License V2.0
|
299 |
+
|
300 |
+
Full License Text:
|
301 |
+
Apache License
|
302 |
+
Version 2.0, January 2004
|
303 |
+
http://www.apache.org/licenses/
|
304 |
+
|
305 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
306 |
+
|
307 |
+
1. Definitions.
|
308 |
+
|
309 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
310 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
311 |
+
|
312 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
313 |
+
the copyright owner that is granting the License.
|
314 |
+
|
315 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
316 |
+
other entities that control, are controlled by, or are under common
|
317 |
+
control with that entity. For the purposes of this definition,
|
318 |
+
"control" means (i) the power, direct or indirect, to cause the
|
319 |
+
direction or management of such entity, whether by contract or
|
320 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
321 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
322 |
+
|
323 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
324 |
+
exercising permissions granted by this License.
|
325 |
+
|
326 |
+
"Source" form shall mean the preferred form for making modifications,
|
327 |
+
including but not limited to software source code, documentation
|
328 |
+
source, and configuration files.
|
329 |
+
|
330 |
+
"Object" form shall mean any form resulting from mechanical
|
331 |
+
transformation or translation of a Source form, including but
|
332 |
+
not limited to compiled object code, generated documentation,
|
333 |
+
and conversions to other media types.
|
334 |
+
|
335 |
+
"Work" shall mean the work of authorship, whether in Source or
|
336 |
+
Object form, made available under the License, as indicated by a
|
337 |
+
copyright notice that is included in or attached to the work
|
338 |
+
(an example is provided in the Appendix below).
|
339 |
+
|
340 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
341 |
+
form, that is based on (or derived from) the Work and for which the
|
342 |
+
editorial revisions, annotations, elaborations, or other modifications
|
343 |
+
represent, as a whole, an original work of authorship. For the purposes
|
344 |
+
of this License, Derivative Works shall not include works that remain
|
345 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
346 |
+
the Work and Derivative Works thereof.
|
347 |
+
|
348 |
+
"Contribution" shall mean any work of authorship, including
|
349 |
+
the original version of the Work and any modifications or additions
|
350 |
+
to that Work or Derivative Works thereof, that is intentionally
|
351 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
352 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
353 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
354 |
+
means any form of electronic, verbal, or written communication sent
|
355 |
+
to the Licensor or its representatives, including but not limited to
|
356 |
+
communication on electronic mailing lists, source code control systems,
|
357 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
358 |
+
Licensor for the purpose of discussing and improving the Work, but
|
359 |
+
excluding communication that is conspicuously marked or otherwise
|
360 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
361 |
+
|
362 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
363 |
+
on behalf of whom a Contribution has been received by Licensor and
|
364 |
+
subsequently incorporated within the Work.
|
365 |
+
|
366 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
367 |
+
this License, each Contributor hereby grants to You a perpetual,
|
368 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
369 |
+
copyright license to reproduce, prepare Derivative Works of,
|
370 |
+
publicly display, publicly perform, sublicense, and distribute the
|
371 |
+
Work and such Derivative Works in Source or Object form.
|
372 |
+
|
373 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
374 |
+
this License, each Contributor hereby grants to You a perpetual,
|
375 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
376 |
+
(except as stated in this section) patent license to make, have made,
|
377 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
378 |
+
where such license applies only to those patent claims licensable
|
379 |
+
by such Contributor that are necessarily infringed by their
|
380 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
381 |
+
with the Work to which such Contribution(s) was submitted. If You
|
382 |
+
institute patent litigation against any entity (including a
|
383 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
384 |
+
or a Contribution incorporated within the Work constitutes direct
|
385 |
+
or contributory patent infringement, then any patent licenses
|
386 |
+
granted to You under this License for that Work shall terminate
|
387 |
+
as of the date such litigation is filed.
|
388 |
+
|
389 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
390 |
+
Work or Derivative Works thereof in any medium, with or without
|
391 |
+
modifications, and in Source or Object form, provided that You
|
392 |
+
meet the following conditions:
|
393 |
+
|
394 |
+
(a) You must give any other recipients of the Work or
|
395 |
+
Derivative Works a copy of this License; and
|
396 |
+
|
397 |
+
(b) You must cause any modified files to carry prominent notices
|
398 |
+
stating that You changed the files; and
|
399 |
+
|
400 |
+
(c) You must retain, in the Source form of any Derivative Works
|
401 |
+
that You distribute, all copyright, patent, trademark, and
|
402 |
+
attribution notices from the Source form of the Work,
|
403 |
+
excluding those notices that do not pertain to any part of
|
404 |
+
the Derivative Works; and
|
405 |
+
|
406 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
407 |
+
distribution, then any Derivative Works that You distribute must
|
408 |
+
include a readable copy of the attribution notices contained
|
409 |
+
within such NOTICE file, excluding those notices that do not
|
410 |
+
pertain to any part of the Derivative Works, in at least one
|
411 |
+
of the following places: within a NOTICE text file distributed
|
412 |
+
as part of the Derivative Works; within the Source form or
|
413 |
+
documentation, if provided along with the Derivative Works; or,
|
414 |
+
within a display generated by the Derivative Works, if and
|
415 |
+
wherever such third-party notices normally appear. The contents
|
416 |
+
of the NOTICE file are for informational purposes only and
|
417 |
+
do not modify the License. You may add Your own attribution
|
418 |
+
notices within Derivative Works that You distribute, alongside
|
419 |
+
or as an addendum to the NOTICE text from the Work, provided
|
420 |
+
that such additional attribution notices cannot be construed
|
421 |
+
as modifying the License.
|
422 |
+
|
423 |
+
You may add Your own copyright statement to Your modifications and
|
424 |
+
may provide additional or different license terms and conditions
|
425 |
+
for use, reproduction, or distribution of Your modifications, or
|
426 |
+
for any such Derivative Works as a whole, provided Your use,
|
427 |
+
reproduction, and distribution of the Work otherwise complies with
|
428 |
+
the conditions stated in this License.
|
429 |
+
|
430 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
431 |
+
any Contribution intentionally submitted for inclusion in the Work
|
432 |
+
by You to the Licensor shall be under the terms and conditions of
|
433 |
+
this License, without any additional terms or conditions.
|
434 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
435 |
+
the terms of any separate license agreement you may have executed
|
436 |
+
with Licensor regarding such Contributions.
|
437 |
+
|
438 |
+
6. Trademarks. This License does not grant permission to use the trade
|
439 |
+
names, trademarks, service marks, or product names of the Licensor,
|
440 |
+
except as required for reasonable and customary use in describing the
|
441 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
442 |
+
|
443 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
444 |
+
agreed to in writing, Licensor provides the Work (and each
|
445 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
446 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
447 |
+
implied, including, without limitation, any warranties or conditions
|
448 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
449 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
450 |
+
appropriateness of using or redistributing the Work and assume any
|
451 |
+
risks associated with Your exercise of permissions under this License.
|
452 |
+
|
453 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
454 |
+
whether in tort (including negligence), contract, or otherwise,
|
455 |
+
unless required by applicable law (such as deliberate and grossly
|
456 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
457 |
+
liable to You for damages, including any direct, indirect, special,
|
458 |
+
incidental, or consequential damages of any character arising as a
|
459 |
+
result of this License or out of the use or inability to use the
|
460 |
+
Work (including but not limited to damages for loss of goodwill,
|
461 |
+
work stoppage, computer failure or malfunction, or any and all
|
462 |
+
other commercial damages or losses), even if such Contributor
|
463 |
+
has been advised of the possibility of such damages.
|
464 |
+
|
465 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
466 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
467 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
468 |
+
or other liability obligations and/or rights consistent with this
|
469 |
+
License. However, in accepting such obligations, You may act only
|
470 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
471 |
+
of any other Contributor, and only if You agree to indemnify,
|
472 |
+
defend, and hold each Contributor harmless for any liability
|
473 |
+
incurred by, or claims asserted against, such Contributor by reason
|
474 |
+
of your accepting any such warranty or additional liability.
|
475 |
+
|
476 |
+
END OF TERMS AND CONDITIONS
|
477 |
+
|
478 |
+
|
479 |
+
================================================================================
|
480 |
+
|
481 |
+
[Software] espnet 0.10.5
|
482 |
+
|
483 |
+
Copyright Notice(s):
|
484 |
+
• Copyright 2017 Johns Hopkins University
|
485 |
+
|
486 |
+
License: Apache License V2.0
|
487 |
+
|
488 |
+
Full License Text:
|
489 |
+
Apache License
|
490 |
+
Version 2.0, January 2004
|
491 |
+
http://www.apache.org/licenses/
|
492 |
+
|
493 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
494 |
+
|
495 |
+
1. Definitions.
|
496 |
+
|
497 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
498 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
499 |
+
|
500 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
501 |
+
the copyright owner that is granting the License.
|
502 |
+
|
503 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
504 |
+
other entities that control, are controlled by, or are under common
|
505 |
+
control with that entity. For the purposes of this definition,
|
506 |
+
"control" means (i) the power, direct or indirect, to cause the
|
507 |
+
direction or management of such entity, whether by contract or
|
508 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
509 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
510 |
+
|
511 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
512 |
+
exercising permissions granted by this License.
|
513 |
+
|
514 |
+
"Source" form shall mean the preferred form for making modifications,
|
515 |
+
including but not limited to software source code, documentation
|
516 |
+
source, and configuration files.
|
517 |
+
|
518 |
+
"Object" form shall mean any form resulting from mechanical
|
519 |
+
transformation or translation of a Source form, including but
|
520 |
+
not limited to compiled object code, generated documentation,
|
521 |
+
and conversions to other media types.
|
522 |
+
|
523 |
+
"Work" shall mean the work of authorship, whether in Source or
|
524 |
+
Object form, made available under the License, as indicated by a
|
525 |
+
copyright notice that is included in or attached to the work
|
526 |
+
(an example is provided in the Appendix below).
|
527 |
+
|
528 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
529 |
+
form, that is based on (or derived from) the Work and for which the
|
530 |
+
editorial revisions, annotations, elaborations, or other modifications
|
531 |
+
represent, as a whole, an original work of authorship. For the purposes
|
532 |
+
of this License, Derivative Works shall not include works that remain
|
533 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
534 |
+
the Work and Derivative Works thereof.
|
535 |
+
|
536 |
+
"Contribution" shall mean any work of authorship, including
|
537 |
+
the original version of the Work and any modifications or additions
|
538 |
+
to that Work or Derivative Works thereof, that is intentionally
|
539 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
540 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
541 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
542 |
+
means any form of electronic, verbal, or written communication sent
|
543 |
+
to the Licensor or its representatives, including but not limited to
|
544 |
+
communication on electronic mailing lists, source code control systems,
|
545 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
546 |
+
Licensor for the purpose of discussing and improving the Work, but
|
547 |
+
excluding communication that is conspicuously marked or otherwise
|
548 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
549 |
+
|
550 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
551 |
+
on behalf of whom a Contribution has been received by Licensor and
|
552 |
+
subsequently incorporated within the Work.
|
553 |
+
|
554 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
555 |
+
this License, each Contributor hereby grants to You a perpetual,
|
556 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
557 |
+
copyright license to reproduce, prepare Derivative Works of,
|
558 |
+
publicly display, publicly perform, sublicense, and distribute the
|
559 |
+
Work and such Derivative Works in Source or Object form.
|
560 |
+
|
561 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
562 |
+
this License, each Contributor hereby grants to You a perpetual,
|
563 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
564 |
+
(except as stated in this section) patent license to make, have made,
|
565 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
566 |
+
where such license applies only to those patent claims licensable
|
567 |
+
by such Contributor that are necessarily infringed by their
|
568 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
569 |
+
with the Work to which such Contribution(s) was submitted. If You
|
570 |
+
institute patent litigation against any entity (including a
|
571 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
572 |
+
or a Contribution incorporated within the Work constitutes direct
|
573 |
+
or contributory patent infringement, then any patent licenses
|
574 |
+
granted to You under this License for that Work shall terminate
|
575 |
+
as of the date such litigation is filed.
|
576 |
+
|
577 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
578 |
+
Work or Derivative Works thereof in any medium, with or without
|
579 |
+
modifications, and in Source or Object form, provided that You
|
580 |
+
meet the following conditions:
|
581 |
+
|
582 |
+
(a) You must give any other recipients of the Work or
|
583 |
+
Derivative Works a copy of this License; and
|
584 |
+
|
585 |
+
(b) You must cause any modified files to carry prominent notices
|
586 |
+
stating that You changed the files; and
|
587 |
+
|
588 |
+
(c) You must retain, in the Source form of any Derivative Works
|
589 |
+
that You distribute, all copyright, patent, trademark, and
|
590 |
+
attribution notices from the Source form of the Work,
|
591 |
+
excluding those notices that do not pertain to any part of
|
592 |
+
the Derivative Works; and
|
593 |
+
|
594 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
595 |
+
distribution, then any Derivative Works that You distribute must
|
596 |
+
include a readable copy of the attribution notices contained
|
597 |
+
within such NOTICE file, excluding those notices that do not
|
598 |
+
pertain to any part of the Derivative Works, in at least one
|
599 |
+
of the following places: within a NOTICE text file distributed
|
600 |
+
as part of the Derivative Works; within the Source form or
|
601 |
+
documentation, if provided along with the Derivative Works; or,
|
602 |
+
within a display generated by the Derivative Works, if and
|
603 |
+
wherever such third-party notices normally appear. The contents
|
604 |
+
of the NOTICE file are for informational purposes only and
|
605 |
+
do not modify the License. You may add Your own attribution
|
606 |
+
notices within Derivative Works that You distribute, alongside
|
607 |
+
or as an addendum to the NOTICE text from the Work, provided
|
608 |
+
that such additional attribution notices cannot be construed
|
609 |
+
as modifying the License.
|
610 |
+
|
611 |
+
You may add Your own copyright statement to Your modifications and
|
612 |
+
may provide additional or different license terms and conditions
|
613 |
+
for use, reproduction, or distribution of Your modifications, or
|
614 |
+
for any such Derivative Works as a whole, provided Your use,
|
615 |
+
reproduction, and distribution of the Work otherwise complies with
|
616 |
+
the conditions stated in this License.
|
617 |
+
|
618 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
619 |
+
any Contribution intentionally submitted for inclusion in the Work
|
620 |
+
by You to the Licensor shall be under the terms and conditions of
|
621 |
+
this License, without any additional terms or conditions.
|
622 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
623 |
+
the terms of any separate license agreement you may have executed
|
624 |
+
with Licensor regarding such Contributions.
|
625 |
+
|
626 |
+
6. Trademarks. This License does not grant permission to use the trade
|
627 |
+
names, trademarks, service marks, or product names of the Licensor,
|
628 |
+
except as required for reasonable and customary use in describing the
|
629 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
630 |
+
|
631 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
632 |
+
agreed to in writing, Licensor provides the Work (and each
|
633 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
634 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
635 |
+
implied, including, without limitation, any warranties or conditions
|
636 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
637 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
638 |
+
appropriateness of using or redistributing the Work and assume any
|
639 |
+
risks associated with Your exercise of permissions under this License.
|
640 |
+
|
641 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
642 |
+
whether in tort (including negligence), contract, or otherwise,
|
643 |
+
unless required by applicable law (such as deliberate and grossly
|
644 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
645 |
+
liable to You for damages, including any direct, indirect, special,
|
646 |
+
incidental, or consequential damages of any character arising as a
|
647 |
+
result of this License or out of the use or inability to use the
|
648 |
+
Work (including but not limited to damages for loss of goodwill,
|
649 |
+
work stoppage, computer failure or malfunction, or any and all
|
650 |
+
other commercial damages or losses), even if such Contributor
|
651 |
+
has been advised of the possibility of such damages.
|
652 |
+
|
653 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
654 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
655 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
656 |
+
or other liability obligations and/or rights consistent with this
|
657 |
+
License. However, in accepting such obligations, You may act only
|
658 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
659 |
+
of any other Contributor, and only if You agree to indemnify,
|
660 |
+
defend, and hold each Contributor harmless for any liability
|
661 |
+
incurred by, or claims asserted against, such Contributor by reason
|
662 |
+
of your accepting any such warranty or additional liability.
|
663 |
+
|
664 |
+
END OF TERMS AND CONDITIONS
|
665 |
+
|
666 |
+
================================================================================
|
667 |
+
|
668 |
+
[SoftWare] vixen build-560
|
669 |
+
|
670 |
+
License: freeware
|
671 |
+
|
672 |
+
Full License Text:
|
673 |
+
|
674 |
+
Vixen is unlicensed and free to use. This software is provided 'as-is', without any express or implied warranty.
|
675 |
+
In no event will the authors or any contributors be held liable for any damages or liabilities arising from the use of this software.
|
676 |
+
|
677 |
+
Vixen uses many 3rd party libraries that are required to be present on the path for the full functionality. Some of those have licenses that can be found as follows.
|
678 |
+
|
679 |
+
NLog http://raw.github.com/NLog/NLog/master/LICENSE.txt
|
680 |
+
NLog.Config http://raw.github.com/NLog/NLog/master/LICENSE.txt
|
681 |
+
NLog.Schema http://raw.github.com/NLog/NLog/master/LICENSE.txt
|
682 |
+
DockPanelSuite http://www.opensource.org/licenses/mit-license.php
|
683 |
+
DockPanelSuite.ThemeVS2015 http://www.opensource.org/licenses/mit-license.php
|
684 |
+
Newtonsoft.Json https://raw.github.com/JamesNK/Newtonsoft.Json/master/LICENSE.md
|
685 |
+
Nowin http://www.opensource.org/licenses/mit-license.php
|
686 |
+
Owin https://github.com/owin-contrib/owin-hosting/blob/master/LICENSE.txt
|
687 |
+
NShape https://nshape.codeplex.com/license
|
688 |
+
ZedGraph https://www.gnu.org/licenses/lgpl-2.1.txt
|
689 |
+
Accord.Net http://accord-framework.net/license.txt
|
690 |
+
Accord.Video.FFMPEG http://accord-framework.net/gpl.txt
|
691 |
+
|
692 |
+
Some portions of the Effect editor code are based on work that is licensed under the Apache 2.0 license.
|
693 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
694 |
+
|
695 |
+
NumericTextBox is derived from NumericalTextBox control licensed under the Apache 2.0 License. http://www.apache.org/licenses/LICENSE-2.0
|
696 |
+
|
697 |
+
================================================================================
|
698 |
+
|
699 |
+
[SoftWare] g2pK 3bb9d5a
|
700 |
+
|
701 |
+
License: Apache License V2.0
|
702 |
+
|
703 |
+
Full License Text:
|
704 |
+
Apache License
|
705 |
+
Version 2.0, January 2004
|
706 |
+
http://www.apache.org/licenses/
|
707 |
+
|
708 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
709 |
+
|
710 |
+
1. Definitions.
|
711 |
+
|
712 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
713 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
714 |
+
|
715 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
716 |
+
the copyright owner that is granting the License.
|
717 |
+
|
718 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
719 |
+
other entities that control, are controlled by, or are under common
|
720 |
+
control with that entity. For the purposes of this definition,
|
721 |
+
"control" means (i) the power, direct or indirect, to cause the
|
722 |
+
direction or management of such entity, whether by contract or
|
723 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
724 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
725 |
+
|
726 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
727 |
+
exercising permissions granted by this License.
|
728 |
+
|
729 |
+
"Source" form shall mean the preferred form for making modifications,
|
730 |
+
including but not limited to software source code, documentation
|
731 |
+
source, and configuration files.
|
732 |
+
|
733 |
+
"Object" form shall mean any form resulting from mechanical
|
734 |
+
transformation or translation of a Source form, including but
|
735 |
+
not limited to compiled object code, generated documentation,
|
736 |
+
and conversions to other media types.
|
737 |
+
|
738 |
+
"Work" shall mean the work of authorship, whether in Source or
|
739 |
+
Object form, made available under the License, as indicated by a
|
740 |
+
copyright notice that is included in or attached to the work
|
741 |
+
(an example is provided in the Appendix below).
|
742 |
+
|
743 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
744 |
+
form, that is based on (or derived from) the Work and for which the
|
745 |
+
editorial revisions, annotations, elaborations, or other modifications
|
746 |
+
represent, as a whole, an original work of authorship. For the purposes
|
747 |
+
of this License, Derivative Works shall not include works that remain
|
748 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
749 |
+
the Work and Derivative Works thereof.
|
750 |
+
|
751 |
+
"Contribution" shall mean any work of authorship, including
|
752 |
+
the original version of the Work and any modifications or additions
|
753 |
+
to that Work or Derivative Works thereof, that is intentionally
|
754 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
755 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
756 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
757 |
+
means any form of electronic, verbal, or written communication sent
|
758 |
+
to the Licensor or its representatives, including but not limited to
|
759 |
+
communication on electronic mailing lists, source code control systems,
|
760 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
761 |
+
Licensor for the purpose of discussing and improving the Work, but
|
762 |
+
excluding communication that is conspicuously marked or otherwise
|
763 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
764 |
+
|
765 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
766 |
+
on behalf of whom a Contribution has been received by Licensor and
|
767 |
+
subsequently incorporated within the Work.
|
768 |
+
|
769 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
770 |
+
this License, each Contributor hereby grants to You a perpetual,
|
771 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
772 |
+
copyright license to reproduce, prepare Derivative Works of,
|
773 |
+
publicly display, publicly perform, sublicense, and distribute the
|
774 |
+
Work and such Derivative Works in Source or Object form.
|
775 |
+
|
776 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
777 |
+
this License, each Contributor hereby grants to You a perpetual,
|
778 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
779 |
+
(except as stated in this section) patent license to make, have made,
|
780 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
781 |
+
where such license applies only to those patent claims licensable
|
782 |
+
by such Contributor that are necessarily infringed by their
|
783 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
784 |
+
with the Work to which such Contribution(s) was submitted. If You
|
785 |
+
institute patent litigation against any entity (including a
|
786 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
787 |
+
or a Contribution incorporated within the Work constitutes direct
|
788 |
+
or contributory patent infringement, then any patent licenses
|
789 |
+
granted to You under this License for that Work shall terminate
|
790 |
+
as of the date such litigation is filed.
|
791 |
+
|
792 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
793 |
+
Work or Derivative Works thereof in any medium, with or without
|
794 |
+
modifications, and in Source or Object form, provided that You
|
795 |
+
meet the following conditions:
|
796 |
+
|
797 |
+
(a) You must give any other recipients of the Work or
|
798 |
+
Derivative Works a copy of this License; and
|
799 |
+
|
800 |
+
(b) You must cause any modified files to carry prominent notices
|
801 |
+
stating that You changed the files; and
|
802 |
+
|
803 |
+
(c) You must retain, in the Source form of any Derivative Works
|
804 |
+
that You distribute, all copyright, patent, trademark, and
|
805 |
+
attribution notices from the Source form of the Work,
|
806 |
+
excluding those notices that do not pertain to any part of
|
807 |
+
the Derivative Works; and
|
808 |
+
|
809 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
810 |
+
distribution, then any Derivative Works that You distribute must
|
811 |
+
include a readable copy of the attribution notices contained
|
812 |
+
within such NOTICE file, excluding those notices that do not
|
813 |
+
pertain to any part of the Derivative Works, in at least one
|
814 |
+
of the following places: within a NOTICE text file distributed
|
815 |
+
as part of the Derivative Works; within the Source form or
|
816 |
+
documentation, if provided along with the Derivative Works; or,
|
817 |
+
within a display generated by the Derivative Works, if and
|
818 |
+
wherever such third-party notices normally appear. The contents
|
819 |
+
of the NOTICE file are for informational purposes only and
|
820 |
+
do not modify the License. You may add Your own attribution
|
821 |
+
notices within Derivative Works that You distribute, alongside
|
822 |
+
or as an addendum to the NOTICE text from the Work, provided
|
823 |
+
that such additional attribution notices cannot be construed
|
824 |
+
as modifying the License.
|
825 |
+
|
826 |
+
You may add Your own copyright statement to Your modifications and
|
827 |
+
may provide additional or different license terms and conditions
|
828 |
+
for use, reproduction, or distribution of Your modifications, or
|
829 |
+
for any such Derivative Works as a whole, provided Your use,
|
830 |
+
reproduction, and distribution of the Work otherwise complies with
|
831 |
+
the conditions stated in this License.
|
832 |
+
|
833 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
834 |
+
any Contribution intentionally submitted for inclusion in the Work
|
835 |
+
by You to the Licensor shall be under the terms and conditions of
|
836 |
+
this License, without any additional terms or conditions.
|
837 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
838 |
+
the terms of any separate license agreement you may have executed
|
839 |
+
with Licensor regarding such Contributions.
|
840 |
+
|
841 |
+
6. Trademarks. This License does not grant permission to use the trade
|
842 |
+
names, trademarks, service marks, or product names of the Licensor,
|
843 |
+
except as required for reasonable and customary use in describing the
|
844 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
845 |
+
|
846 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
847 |
+
agreed to in writing, Licensor provides the Work (and each
|
848 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
849 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
850 |
+
implied, including, without limitation, any warranties or conditions
|
851 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
852 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
853 |
+
appropriateness of using or redistributing the Work and assume any
|
854 |
+
risks associated with Your exercise of permissions under this License.
|
855 |
+
|
856 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
857 |
+
whether in tort (including negligence), contract, or otherwise,
|
858 |
+
unless required by applicable law (such as deliberate and grossly
|
859 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
860 |
+
liable to You for damages, including any direct, indirect, special,
|
861 |
+
incidental, or consequential damages of any character arising as a
|
862 |
+
result of this License or out of the use or inability to use the
|
863 |
+
Work (including but not limited to damages for loss of goodwill,
|
864 |
+
work stoppage, computer failure or malfunction, or any and all
|
865 |
+
other commercial damages or losses), even if such Contributor
|
866 |
+
has been advised of the possibility of such damages.
|
867 |
+
|
868 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
869 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
870 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
871 |
+
or other liability obligations and/or rights consistent with this
|
872 |
+
License. However, in accepting such obligations, You may act only
|
873 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
874 |
+
of any other Contributor, and only if You agree to indemnify,
|
875 |
+
defend, and hold each Contributor harmless for any liability
|
876 |
+
incurred by, or claims asserted against, such Contributor by reason
|
877 |
+
of your accepting any such warranty or additional liability.
|
878 |
+
|
879 |
+
END OF TERMS AND CONDITIONS
|
880 |
+
|
881 |
+
================================================================================
|
882 |
+
|
883 |
+
[SoftWare] Parakeet 0.4.0
|
884 |
+
|
885 |
+
Copyright Notice(s):
|
886 |
+
• Copyright 2017 Johns Hopkins University
|
887 |
+
|
888 |
+
License: Apache License V2.0
|
889 |
+
|
890 |
+
Full License Text:
|
891 |
+
Apache License
|
892 |
+
Version 2.0, January 2004
|
893 |
+
http://www.apache.org/licenses/
|
894 |
+
|
895 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
896 |
+
|
897 |
+
1. Definitions.
|
898 |
+
|
899 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
900 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
901 |
+
|
902 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
903 |
+
the copyright owner that is granting the License.
|
904 |
+
|
905 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
906 |
+
other entities that control, are controlled by, or are under common
|
907 |
+
control with that entity. For the purposes of this definition,
|
908 |
+
"control" means (i) the power, direct or indirect, to cause the
|
909 |
+
direction or management of such entity, whether by contract or
|
910 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
911 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
912 |
+
|
913 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
914 |
+
exercising permissions granted by this License.
|
915 |
+
|
916 |
+
"Source" form shall mean the preferred form for making modifications,
|
917 |
+
including but not limited to software source code, documentation
|
918 |
+
source, and configuration files.
|
919 |
+
|
920 |
+
"Object" form shall mean any form resulting from mechanical
|
921 |
+
transformation or translation of a Source form, including but
|
922 |
+
not limited to compiled object code, generated documentation,
|
923 |
+
and conversions to other media types.
|
924 |
+
|
925 |
+
"Work" shall mean the work of authorship, whether in Source or
|
926 |
+
Object form, made available under the License, as indicated by a
|
927 |
+
copyright notice that is included in or attached to the work
|
928 |
+
(an example is provided in the Appendix below).
|
929 |
+
|
930 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
931 |
+
form, that is based on (or derived from) the Work and for which the
|
932 |
+
editorial revisions, annotations, elaborations, or other modifications
|
933 |
+
represent, as a whole, an original work of authorship. For the purposes
|
934 |
+
of this License, Derivative Works shall not include works that remain
|
935 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
936 |
+
the Work and Derivative Works thereof.
|
937 |
+
|
938 |
+
"Contribution" shall mean any work of authorship, including
|
939 |
+
the original version of the Work and any modifications or additions
|
940 |
+
to that Work or Derivative Works thereof, that is intentionally
|
941 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
942 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
943 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
944 |
+
means any form of electronic, verbal, or written communication sent
|
945 |
+
to the Licensor or its representatives, including but not limited to
|
946 |
+
communication on electronic mailing lists, source code control systems,
|
947 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
948 |
+
Licensor for the purpose of discussing and improving the Work, but
|
949 |
+
excluding communication that is conspicuously marked or otherwise
|
950 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
951 |
+
|
952 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
953 |
+
on behalf of whom a Contribution has been received by Licensor and
|
954 |
+
subsequently incorporated within the Work.
|
955 |
+
|
956 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
957 |
+
this License, each Contributor hereby grants to You a perpetual,
|
958 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
959 |
+
copyright license to reproduce, prepare Derivative Works of,
|
960 |
+
publicly display, publicly perform, sublicense, and distribute the
|
961 |
+
Work and such Derivative Works in Source or Object form.
|
962 |
+
|
963 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
964 |
+
this License, each Contributor hereby grants to You a perpetual,
|
965 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
966 |
+
(except as stated in this section) patent license to make, have made,
|
967 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
968 |
+
where such license applies only to those patent claims licensable
|
969 |
+
by such Contributor that are necessarily infringed by their
|
970 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
971 |
+
with the Work to which such Contribution(s) was submitted. If You
|
972 |
+
institute patent litigation against any entity (including a
|
973 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
974 |
+
or a Contribution incorporated within the Work constitutes direct
|
975 |
+
or contributory patent infringement, then any patent licenses
|
976 |
+
granted to You under this License for that Work shall terminate
|
977 |
+
as of the date such litigation is filed.
|
978 |
+
|
979 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
980 |
+
Work or Derivative Works thereof in any medium, with or without
|
981 |
+
modifications, and in Source or Object form, provided that You
|
982 |
+
meet the following conditions:
|
983 |
+
|
984 |
+
(a) You must give any other recipients of the Work or
|
985 |
+
Derivative Works a copy of this License; and
|
986 |
+
|
987 |
+
(b) You must cause any modified files to carry prominent notices
|
988 |
+
stating that You changed the files; and
|
989 |
+
|
990 |
+
(c) You must retain, in the Source form of any Derivative Works
|
991 |
+
that You distribute, all copyright, patent, trademark, and
|
992 |
+
attribution notices from the Source form of the Work,
|
993 |
+
excluding those notices that do not pertain to any part of
|
994 |
+
the Derivative Works; and
|
995 |
+
|
996 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
997 |
+
distribution, then any Derivative Works that You distribute must
|
998 |
+
include a readable copy of the attribution notices contained
|
999 |
+
within such NOTICE file, excluding those notices that do not
|
1000 |
+
pertain to any part of the Derivative Works, in at least one
|
1001 |
+
of the following places: within a NOTICE text file distributed
|
1002 |
+
as part of the Derivative Works; within the Source form or
|
1003 |
+
documentation, if provided along with the Derivative Works; or,
|
1004 |
+
within a display generated by the Derivative Works, if and
|
1005 |
+
wherever such third-party notices normally appear. The contents
|
1006 |
+
of the NOTICE file are for informational purposes only and
|
1007 |
+
do not modify the License. You may add Your own attribution
|
1008 |
+
notices within Derivative Works that You distribute, alongside
|
1009 |
+
or as an addendum to the NOTICE text from the Work, provided
|
1010 |
+
that such additional attribution notices cannot be construed
|
1011 |
+
as modifying the License.
|
1012 |
+
|
1013 |
+
You may add Your own copyright statement to Your modifications and
|
1014 |
+
may provide additional or different license terms and conditions
|
1015 |
+
for use, reproduction, or distribution of Your modifications, or
|
1016 |
+
for any such Derivative Works as a whole, provided Your use,
|
1017 |
+
reproduction, and distribution of the Work otherwise complies with
|
1018 |
+
the conditions stated in this License.
|
1019 |
+
|
1020 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
1021 |
+
any Contribution intentionally submitted for inclusion in the Work
|
1022 |
+
by You to the Licensor shall be under the terms and conditions of
|
1023 |
+
this License, without any additional terms or conditions.
|
1024 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
1025 |
+
the terms of any separate license agreement you may have executed
|
1026 |
+
with Licensor regarding such Contributions.
|
1027 |
+
|
1028 |
+
6. Trademarks. This License does not grant permission to use the trade
|
1029 |
+
names, trademarks, service marks, or product names of the Licensor,
|
1030 |
+
except as required for reasonable and customary use in describing the
|
1031 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
1032 |
+
|
1033 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
1034 |
+
agreed to in writing, Licensor provides the Work (and each
|
1035 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
1036 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
1037 |
+
implied, including, without limitation, any warranties or conditions
|
1038 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
1039 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
1040 |
+
appropriateness of using or redistributing the Work and assume any
|
1041 |
+
risks associated with Your exercise of permissions under this License.
|
1042 |
+
|
1043 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
1044 |
+
whether in tort (including negligence), contract, or otherwise,
|
1045 |
+
unless required by applicable law (such as deliberate and grossly
|
1046 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
1047 |
+
liable to You for damages, including any direct, indirect, special,
|
1048 |
+
incidental, or consequential damages of any character arising as a
|
1049 |
+
result of this License or out of the use or inability to use the
|
1050 |
+
Work (including but not limited to damages for loss of goodwill,
|
1051 |
+
work stoppage, computer failure or malfunction, or any and all
|
1052 |
+
other commercial damages or losses), even if such Contributor
|
1053 |
+
has been advised of the possibility of such damages.
|
1054 |
+
|
1055 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
1056 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
1057 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
1058 |
+
or other liability obligations and/or rights consistent with this
|
1059 |
+
License. However, in accepting such obligations, You may act only
|
1060 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
1061 |
+
of any other Contributor, and only if You agree to indemnify,
|
1062 |
+
defend, and hold each Contributor harmless for any liability
|
1063 |
+
incurred by, or claims asserted against, such Contributor by reason
|
1064 |
+
of your accepting any such warranty or additional liability.
|
1065 |
+
|
1066 |
+
END OF TERMS AND CONDITIONS
|
1067 |
+
|
1068 |
+
================================================================================
|
1069 |
+
|
1070 |
+
[SoftWare] PaddleSpeech 0.2.0
|
1071 |
+
|
1072 |
+
Copyright Notice(s):
|
1073 |
+
• Copyright (c) 2021 PaddlePaddle Authors
|
1074 |
+
|
1075 |
+
License: Apache License V2.0
|
1076 |
+
|
1077 |
+
Full License Text:
|
1078 |
+
Apache License
|
1079 |
+
Version 2.0, January 2004
|
1080 |
+
http://www.apache.org/licenses/
|
1081 |
+
|
1082 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
1083 |
+
|
1084 |
+
1. Definitions.
|
1085 |
+
|
1086 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
1087 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
1088 |
+
|
1089 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
1090 |
+
the copyright owner that is granting the License.
|
1091 |
+
|
1092 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
1093 |
+
other entities that control, are controlled by, or are under common
|
1094 |
+
control with that entity. For the purposes of this definition,
|
1095 |
+
"control" means (i) the power, direct or indirect, to cause the
|
1096 |
+
direction or management of such entity, whether by contract or
|
1097 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
1098 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
1099 |
+
|
1100 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
1101 |
+
exercising permissions granted by this License.
|
1102 |
+
|
1103 |
+
"Source" form shall mean the preferred form for making modifications,
|
1104 |
+
including but not limited to software source code, documentation
|
1105 |
+
source, and configuration files.
|
1106 |
+
|
1107 |
+
"Object" form shall mean any form resulting from mechanical
|
1108 |
+
transformation or translation of a Source form, including but
|
1109 |
+
not limited to compiled object code, generated documentation,
|
1110 |
+
and conversions to other media types.
|
1111 |
+
|
1112 |
+
"Work" shall mean the work of authorship, whether in Source or
|
1113 |
+
Object form, made available under the License, as indicated by a
|
1114 |
+
copyright notice that is included in or attached to the work
|
1115 |
+
(an example is provided in the Appendix below).
|
1116 |
+
|
1117 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
1118 |
+
form, that is based on (or derived from) the Work and for which the
|
1119 |
+
editorial revisions, annotations, elaborations, or other modifications
|
1120 |
+
represent, as a whole, an original work of authorship. For the purposes
|
1121 |
+
of this License, Derivative Works shall not include works that remain
|
1122 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
1123 |
+
the Work and Derivative Works thereof.
|
1124 |
+
|
1125 |
+
"Contribution" shall mean any work of authorship, including
|
1126 |
+
the original version of the Work and any modifications or additions
|
1127 |
+
to that Work or Derivative Works thereof, that is intentionally
|
1128 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
1129 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
1130 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
1131 |
+
means any form of electronic, verbal, or written communication sent
|
1132 |
+
to the Licensor or its representatives, including but not limited to
|
1133 |
+
communication on electronic mailing lists, source code control systems,
|
1134 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
1135 |
+
Licensor for the purpose of discussing and improving the Work, but
|
1136 |
+
excluding communication that is conspicuously marked or otherwise
|
1137 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
1138 |
+
|
1139 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
1140 |
+
on behalf of whom a Contribution has been received by Licensor and
|
1141 |
+
subsequently incorporated within the Work.
|
1142 |
+
|
1143 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
1144 |
+
this License, each Contributor hereby grants to You a perpetual,
|
1145 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
1146 |
+
copyright license to reproduce, prepare Derivative Works of,
|
1147 |
+
publicly display, publicly perform, sublicense, and distribute the
|
1148 |
+
Work and such Derivative Works in Source or Object form.
|
1149 |
+
|
1150 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
1151 |
+
this License, each Contributor hereby grants to You a perpetual,
|
1152 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
1153 |
+
(except as stated in this section) patent license to make, have made,
|
1154 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
1155 |
+
where such license applies only to those patent claims licensable
|
1156 |
+
by such Contributor that are necessarily infringed by their
|
1157 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
1158 |
+
with the Work to which such Contribution(s) was submitted. If You
|
1159 |
+
institute patent litigation against any entity (including a
|
1160 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
1161 |
+
or a Contribution incorporated within the Work constitutes direct
|
1162 |
+
or contributory patent infringement, then any patent licenses
|
1163 |
+
granted to You under this License for that Work shall terminate
|
1164 |
+
as of the date such litigation is filed.
|
1165 |
+
|
1166 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
1167 |
+
Work or Derivative Works thereof in any medium, with or without
|
1168 |
+
modifications, and in Source or Object form, provided that You
|
1169 |
+
meet the following conditions:
|
1170 |
+
|
1171 |
+
(a) You must give any other recipients of the Work or
|
1172 |
+
Derivative Works a copy of this License; and
|
1173 |
+
|
1174 |
+
(b) You must cause any modified files to carry prominent notices
|
1175 |
+
stating that You changed the files; and
|
1176 |
+
|
1177 |
+
(c) You must retain, in the Source form of any Derivative Works
|
1178 |
+
that You distribute, all copyright, patent, trademark, and
|
1179 |
+
attribution notices from the Source form of the Work,
|
1180 |
+
excluding those notices that do not pertain to any part of
|
1181 |
+
the Derivative Works; and
|
1182 |
+
|
1183 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
1184 |
+
distribution, then any Derivative Works that You distribute must
|
1185 |
+
include a readable copy of the attribution notices contained
|
1186 |
+
within such NOTICE file, excluding those notices that do not
|
1187 |
+
pertain to any part of the Derivative Works, in at least one
|
1188 |
+
of the following places: within a NOTICE text file distributed
|
1189 |
+
as part of the Derivative Works; within the Source form or
|
1190 |
+
documentation, if provided along with the Derivative Works; or,
|
1191 |
+
within a display generated by the Derivative Works, if and
|
1192 |
+
wherever such third-party notices normally appear. The contents
|
1193 |
+
of the NOTICE file are for informational purposes only and
|
1194 |
+
do not modify the License. You may add Your own attribution
|
1195 |
+
notices within Derivative Works that You distribute, alongside
|
1196 |
+
or as an addendum to the NOTICE text from the Work, provided
|
1197 |
+
that such additional attribution notices cannot be construed
|
1198 |
+
as modifying the License.
|
1199 |
+
|
1200 |
+
You may add Your own copyright statement to Your modifications and
|
1201 |
+
may provide additional or different license terms and conditions
|
1202 |
+
for use, reproduction, or distribution of Your modifications, or
|
1203 |
+
for any such Derivative Works as a whole, provided Your use,
|
1204 |
+
reproduction, and distribution of the Work otherwise complies with
|
1205 |
+
the conditions stated in this License.
|
1206 |
+
|
1207 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
1208 |
+
any Contribution intentionally submitted for inclusion in the Work
|
1209 |
+
by You to the Licensor shall be under the terms and conditions of
|
1210 |
+
this License, without any additional terms or conditions.
|
1211 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
1212 |
+
the terms of any separate license agreement you may have executed
|
1213 |
+
with Licensor regarding such Contributions.
|
1214 |
+
|
1215 |
+
6. Trademarks. This License does not grant permission to use the trade
|
1216 |
+
names, trademarks, service marks, or product names of the Licensor,
|
1217 |
+
except as required for reasonable and customary use in describing the
|
1218 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
1219 |
+
|
1220 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
1221 |
+
agreed to in writing, Licensor provides the Work (and each
|
1222 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
1223 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
1224 |
+
implied, including, without limitation, any warranties or conditions
|
1225 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
1226 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
1227 |
+
appropriateness of using or redistributing the Work and assume any
|
1228 |
+
risks associated with Your exercise of permissions under this License.
|
1229 |
+
|
1230 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
1231 |
+
whether in tort (including negligence), contract, or otherwise,
|
1232 |
+
unless required by applicable law (such as deliberate and grossly
|
1233 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
1234 |
+
liable to You for damages, including any direct, indirect, special,
|
1235 |
+
incidental, or consequential damages of any character arising as a
|
1236 |
+
result of this License or out of the use or inability to use the
|
1237 |
+
Work (including but not limited to damages for loss of goodwill,
|
1238 |
+
work stoppage, computer failure or malfunction, or any and all
|
1239 |
+
other commercial damages or losses), even if such Contributor
|
1240 |
+
has been advised of the possibility of such damages.
|
1241 |
+
|
1242 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
1243 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
1244 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
1245 |
+
or other liability obligations and/or rights consistent with this
|
1246 |
+
License. However, in accepting such obligations, You may act only
|
1247 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
1248 |
+
of any other Contributor, and only if You agree to indemnify,
|
1249 |
+
defend, and hold each Contributor harmless for any liability
|
1250 |
+
incurred by, or claims asserted against, such Contributor by reason
|
1251 |
+
of your accepting any such warranty or additional liability.
|
1252 |
+
|
1253 |
+
END OF TERMS AND CONDITIONS
|
1254 |
+
|
1255 |
+
================================================================================
|
1256 |
+
|
1257 |
+
[SoftWare] espresso 3add524
|
1258 |
+
|
1259 |
+
Copyright Notice(s):
|
1260 |
+
• Copyright (c) Yiming Wang
|
1261 |
+
|
1262 |
+
License: MIT License
|
1263 |
+
|
1264 |
+
Full License Text:
|
1265 |
+
MIT License
|
1266 |
+
|
1267 |
+
Copyright for the original fairseq code are held by Facebook, Inc. and its
|
1268 |
+
affiliates as part of project Espresso. All other copyright for project Espresso
|
1269 |
+
are held by Espresso authors.
|
1270 |
+
|
1271 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
1272 |
+
of this software and associated documentation files (the "Software"), to deal
|
1273 |
+
in the Software without restriction, including without limitation the rights
|
1274 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
1275 |
+
copies of the Software, and to permit persons to whom the Software is
|
1276 |
+
furnished to do so, subject to the following conditions:
|
1277 |
+
|
1278 |
+
The above copyright notice and this permission notice shall be included in all
|
1279 |
+
copies or substantial portions of the Software.
|
1280 |
+
|
1281 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
1282 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
1283 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
1284 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
1285 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
1286 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
1287 |
+
SOFTWARE.
|
1288 |
+
|
1289 |
+
================================================================================
|
evaluation/README.md
ADDED
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
evaluation:
|
2 |
+
|
3 |
+
for evaluation of speaker similarity, the user should download the latest version of UniSpeech from GitHub:
|
4 |
+
|
5 |
+
git clone https://github.com/Sanyuan-Chen/UniSpeech.git
|
6 |
+
|
7 |
+
and add the path of UniSpeech to PYTHONPATH:
|
8 |
+
|
9 |
+
export PYTHONPATH=/path/to/UniSpeech/downstreams/speaker_verification:$PYTHONPATH
|
evaluation/eval_detok_en.py
ADDED
@@ -0,0 +1,269 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright (C) 2025. Huawei Technologies Co., Ltd. All Rights Reserved. (authors: Xiao Chen)
|
2 |
+
|
3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
4 |
+
# you may not use this file except in compliance with the License.
|
5 |
+
# You may obtain a copy of the License at
|
6 |
+
|
7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
8 |
+
|
9 |
+
# Unless required by applicable law or agreed to in writing, software
|
10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
+
# See the License for the specific language governing permissions and
|
13 |
+
# limitations under the License.
|
14 |
+
|
15 |
+
from transformers import WhisperProcessor, WhisperForConditionalGeneration
|
16 |
+
import soundfile as sf
|
17 |
+
import scipy
|
18 |
+
import argparse
|
19 |
+
from whisper_normalizer.english import EnglishTextNormalizer
|
20 |
+
import os
|
21 |
+
import string
|
22 |
+
import lingvo.tasks.asr.tools.simple_wer_v2 as WER
|
23 |
+
from tqdm import tqdm
|
24 |
+
import logging
|
25 |
+
import torch
|
26 |
+
|
27 |
+
keyphrases = None
|
28 |
+
english_normalizer = EnglishTextNormalizer()
|
29 |
+
device = torch.device("cuda")
|
30 |
+
en_asr_model_path = "./whisper-large-v3"
|
31 |
+
|
32 |
+
|
33 |
+
wer_obj = WER.SimpleWER(
|
34 |
+
key_phrases=keyphrases,
|
35 |
+
html_handler=WER.HighlightAlignedHtmlHandler(WER.HighlightAlignedHtml),
|
36 |
+
preprocess_handler=WER.RemoveCommentTxtPreprocess,
|
37 |
+
)
|
38 |
+
|
39 |
+
|
40 |
+
def dummy_split_text(text):
    """Identity splitter: return *text* unchanged (English needs no char split)."""
    return text
|
42 |
+
|
43 |
+
|
44 |
+
def remove_punct(text):
    """Strip ASCII punctuation characters from *text*.

    Args:
        text: input string.

    Returns:
        *text* with every ``string.punctuation`` character removed.
    """
    puncts = set(string.punctuation)
    # join() builds the result in a single pass; the original char-by-char
    # `+=` concatenation is quadratic in the worst case.
    output = "".join(char for char in text if char not in puncts)
    # NOTE(review): this replace() maps a single space to a single space and
    # is a no-op as written; it presumably was meant to collapse doubled
    # spaces ("  " -> " ") -- confirm against the original source.
    output = output.replace(" ", " ")
    return output
|
52 |
+
|
53 |
+
|
54 |
+
def get_gt_ref_texts_and_wav_files(
    args, gt_test_lst, gt_folder, punct_remover, text_spliter
):
    """Collect ground-truth transcripts and their wav paths.

    Reads `gt_test_lst` ("|"-separated; utterance id in field 0, transcript
    in the last field), keeps only entries whose wav exists under
    `gt_folder`, and returns [processed_text, raw_text] pairs aligned with
    the wav file list.
    """
    references, wav_files = [], []
    with open(gt_test_lst, "r") as handle:
        for raw_line in handle:
            columns = raw_line.strip().split("|")
            candidate = f"{gt_folder}/{columns[0]}.wav"

            # Skip list entries whose audio is missing on disk.
            if not os.path.isfile(candidate):
                continue
            wav_files.append(candidate)

            lowered = columns[-1].lower()
            if args.norm_text:
                processed = english_normalizer(lowered)
            elif args.remove_punct:
                processed = punct_remover(lowered)
            else:
                processed = lowered
            references.append([text_spliter(processed), columns[-1]])

    assert len(references) == len(wav_files)
    return references, wav_files
|
80 |
+
|
81 |
+
|
82 |
+
def get_ref_texts_and_gen_files(
    args, test_lst, test_folder, punct_remover, text_spliter
):
    """Collect reference transcripts and generated-wav paths.

    Each line of `test_lst` is "|"-separated with the source audio path in
    field 2 and the transcript in the last field.  The generated wav for an
    utterance is expected at ``<test_folder>/<stem>_gen.wav``.

    Returns:
        (reference, gen_file_list): reference holds [processed_text,
        raw_text] pairs aligned with gen_file_list.
    """
    reference = []
    gen_file_list = []
    with open(test_lst, "r") as fp:
        for line in fp:
            fields = line.strip().split("|")
            filename = fields[2].split("/")[-1]
            filename = filename.split(".")[0]
            # Bug fix: the stem was computed but never used and the name was
            # hard-coded; derive the generated file name from the stem (the
            # same "<stem>_gen.wav" convention used in eval_sim.py).
            gen_file = f"{filename}_gen.wav"
            gen_file_list.append(f"{test_folder}/{gen_file}")

            text = fields[-1].lower()
            if args.norm_text:
                truth_text = english_normalizer(text)
            elif args.remove_punct:
                truth_text = punct_remover(text)
            else:
                truth_text = text

            truth_text = text_spliter(truth_text)
            reference.append([truth_text, fields[-1]])

    assert len(reference) == len(gen_file_list)
    return reference, gen_file_list
|
108 |
+
|
109 |
+
|
110 |
+
def get_hypo_texts(args, results_list, punct_remover, text_spliter):
    """Post-process ASR outputs into [processed_text, raw_text] pairs.

    Applies (in priority order) text normalization or punctuation removal to
    the lowercased hypothesis, then the splitter, keeping the raw text too.
    """
    processed_pairs = []
    for record in results_list:
        lowered = record["text"].lower()
        if args.norm_text:
            cleaned = english_normalizer(lowered)
        elif args.remove_punct:
            cleaned = punct_remover(lowered)
        else:
            cleaned = lowered
        processed_pairs.append([text_spliter(cleaned), record["text"]])

    return processed_pairs
|
124 |
+
|
125 |
+
|
126 |
+
def calc_wer(reference, hypothesis, test_lst):
    """Accumulate WER over aligned hypothesis/reference pairs and log it.

    Args:
        reference: list of [processed_text, raw_text] pairs.
        hypothesis: list of [processed_text, raw_text] pairs, aligned.
        test_lst: path prefix for the diagnosis output files.

    Side effects: writes "<test_lst>_diagnosis.html" (alignment view) and
    "<test_lst>_rawtext.lst" ("ref|hypo" lines); write failures are logged,
    not raised.
    """
    logging.info(f"calc WER:")
    for idx in tqdm(range(len(hypothesis))):
        hypo = hypothesis[idx][0].strip()
        ref = reference[idx][0].strip()
        wer_obj.AddHypRef(hypo, ref)

    str_summary, str_details, str_keyphrases_info = wer_obj.GetSummaries()
    logging.info(f"WER summary:")
    logging.info(str_summary)
    logging.info(str_details)
    logging.info(str_keyphrases_info)

    try:
        fn_output = test_lst + "_diagnosis.html"
        aligned_html = "<br>".join(wer_obj.aligned_htmls)
        # `with` already closes the files; the original redundant
        # fp.close() calls inside the with-blocks were removed.
        with open(fn_output, "wt") as fp:
            fp.write("<body><html>")
            fp.write("<div>%s</div>" % aligned_html)
            fp.write("</body></html>")

        text_output = test_lst + "_rawtext.lst"
        with open(text_output, "w") as fp:
            for ref, hypo in zip(reference, hypothesis):
                fp.write(f"{ref[1]}|{hypo[1]}\n")
        logging.info(f"Save {fn_output} and {text_output} for diagnosis")
    except IOError:
        logging.info("failed to write diagnosis html")
|
156 |
+
|
157 |
+
|
158 |
+
def load_en_model():
    """Load the Whisper processor and model from `en_asr_model_path`.

    Returns:
        (processor, model) with the model moved to the global `device`.
    """
    processor = WhisperProcessor.from_pretrained(en_asr_model_path)
    model = WhisperForConditionalGeneration.from_pretrained(en_asr_model_path)
    return processor, model.to(device)
|
164 |
+
|
165 |
+
|
166 |
+
def process_wavs(wav_file_list, batch_size=300):
    """Run Whisper ASR over every wav file and return transcriptions.

    Args:
        wav_file_list: paths of wav files to transcribe.
        batch_size: accepted for interface compatibility only -- the loop
            transcribes one file at a time and does not use it.

    Returns:
        list of {"text": transcription} dicts, one per input file.
    """
    transcripts = []
    processor, model = load_en_model()
    for path in tqdm(wav_file_list):
        audio, sample_rate = sf.read(path)
        # Whisper expects 16 kHz input; resample anything else.
        if sample_rate != 16000:
            audio = scipy.signal.resample(
                audio, int(len(audio) * 16000 / sample_rate)
            )
        features = processor(
            audio, sampling_rate=16000, return_tensors="pt"
        ).input_features.to(device)
        decoder_ids = processor.get_decoder_prompt_ids(
            language="english", task="transcribe"
        )
        predicted = model.generate(features, forced_decoder_ids=decoder_ids)
        text = processor.batch_decode(predicted, skip_special_tokens=True)[0]
        transcripts.append({"text": text.strip()})

    return transcripts
|
189 |
+
|
190 |
+
|
191 |
+
def main(args):
    """Run ASR over ground-truth or generated wavs and report WER.

    Args:
        args: parsed CLI namespace with test_path, test_lst, log_file,
            norm_text, remove_punct, and eval_gt attributes.
    """
    handler = logging.FileHandler(filename=args.log_file, mode="w")
    logging.root.setLevel(logging.INFO)
    logging.root.addHandler(handler)

    test_path = args.test_path
    lst_path = args.test_lst
    logging.info(
        f"Evaluate {args.test_path} with Text Normalization: {args.norm_text} and Remove Punct: {args.remove_punct}"
    )

    if args.eval_gt:
        # Score the original recordings themselves (sanity baseline).
        logging.info(f"run ASR for GT: {lst_path}")
        reference, wav_file_list = get_gt_ref_texts_and_wav_files(
            args, lst_path, test_path, remove_punct, dummy_split_text
        )
        results = process_wavs(wav_file_list, batch_size=12)
    else:
        logging.info(f"run ASR for detok: {lst_path}")
        reference, gen_file_list = get_ref_texts_and_gen_files(
            args, lst_path, test_path, remove_punct, dummy_split_text
        )
        results = process_wavs(gen_file_list, batch_size=12)

    hypothesis = get_hypo_texts(args, results, remove_punct, dummy_split_text)

    assert len(hypothesis) == len(reference)
    # Typo fix in the log message: "runing" -> "running".
    logging.info(f"Finish running ASR for {lst_path}")
    logging.info(f"hypothesis: {len(hypothesis)} vs reference: {len(reference)}")

    calc_wer(reference, hypothesis, test_path)
    logging.info(f"Finish evaluate {lst_path}, results are in {args.log_file}")
|
225 |
+
|
226 |
+
|
227 |
+
if __name__ == "__main__":
    # CLI entry point: parse arguments and run the WER evaluation.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--test-path",
        required=True,
        type=str,
        help="folder of wav files",
    )
    parser.add_argument(
        "--test-lst",
        required=True,
        type=str,
        help="path to test file lst",
    )
    parser.add_argument(
        "--log-file",
        required=False,
        type=str,
        default=None,
        # Help-text fix: this option is the log destination, not a test list.
        help="path to the log file results are written to",
    )
    parser.add_argument(
        "--norm-text",
        default=False,
        action="store_true",
        help="normalize GT and hypo texts",
    )
    parser.add_argument(
        "--remove-punct",
        default=False,
        action="store_true",
        help="remove punct from GT and hypo texts",
    )
    parser.add_argument(
        "--eval-gt",
        default=False,
        action="store_true",
        # Help-text fix: previously a copy-paste of --remove-punct's help.
        help="evaluate the ground-truth recordings instead of generated ones",
    )
    args = parser.parse_args()

    main(args)
|
evaluation/eval_detok_zh.py
ADDED
@@ -0,0 +1,144 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright (C) 2025. Huawei Technologies Co., Ltd. All Rights Reserved. (authors: Xiao Chen)
|
2 |
+
|
3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
4 |
+
# you may not use this file except in compliance with the License.
|
5 |
+
# You may obtain a copy of the License at
|
6 |
+
|
7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
8 |
+
|
9 |
+
# Unless required by applicable law or agreed to in writing, software
|
10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
+
# See the License for the specific language governing permissions and
|
13 |
+
# limitations under the License.
|
14 |
+
|
15 |
+
import logging
|
16 |
+
from funasr import AutoModel
|
17 |
+
import argparse
|
18 |
+
from zhon.hanzi import punctuation
|
19 |
+
import zhconv
|
20 |
+
import string
|
21 |
+
from tqdm import tqdm
|
22 |
+
from eval_detok_en import (
|
23 |
+
get_gt_ref_texts_and_wav_files,
|
24 |
+
get_ref_texts_and_gen_files,
|
25 |
+
get_hypo_texts,
|
26 |
+
calc_wer,
|
27 |
+
)
|
28 |
+
|
29 |
+
model_path = "./paraformer-zh" # "./speech_paraformer-large-vad-punc_asr_nat-zh-cn-16k-common-vocab8404-pytorch"
|
30 |
+
|
31 |
+
|
32 |
+
def split_text(text):
    """Insert a space between every character (char-level scoring for zh)."""
    return " ".join(text)
|
35 |
+
|
36 |
+
|
37 |
+
def dummy_split_text(text):
    """Identity splitter: return *text* unchanged."""
    return text
|
39 |
+
|
40 |
+
|
41 |
+
def remove_punct(text):
    """Strip both Chinese (zhon.hanzi) and ASCII punctuation from *text*.

    Args:
        text: input string.

    Returns:
        *text* with every character in ``punctuation + string.punctuation``
        removed.
    """
    puncts = set(punctuation + string.punctuation)
    # join() builds the result in a single pass; the original char-by-char
    # `+=` concatenation is quadratic in the worst case.
    output = "".join(char for char in text if char not in puncts)
    # NOTE(review): this replace() maps a single space to a single space and
    # is a no-op as written; it presumably was meant to collapse doubled
    # spaces ("  " -> " ") -- confirm against the original source.
    output = output.replace(" ", " ")
    return output
|
49 |
+
|
50 |
+
|
51 |
+
def process_wavs(wav_file_list, batch_size=300):
    """Transcribe each wav with the Paraformer model.

    Args:
        wav_file_list: paths of wav files to transcribe.
        batch_size: forwarded to the model as ``batch_size_s``.

    Returns:
        list of {"text": transcription} dicts (simplified Chinese).
    """
    model = AutoModel(
        model=model_path,
        disable_update=True,
    )

    transcripts = []
    for path in tqdm(wav_file_list):
        outputs = model.generate(
            input=path,
            batch_size_s=batch_size,
        )
        # Normalize any traditional characters to simplified Chinese.
        simplified = zhconv.convert(outputs[0]["text"], "zh-cn")
        transcripts.append({"text": simplified})
    return transcripts
|
66 |
+
|
67 |
+
|
68 |
+
def main(args):
    """Run Paraformer ASR over ground-truth or generated wavs and report
    char-level WER (CER).

    Args:
        args: parsed CLI namespace with test_path, test_lst, log_file,
            norm_text, remove_punct, and eval_gt attributes.
    """
    handler = logging.FileHandler(filename=args.log_file, mode="w")
    logging.root.setLevel(logging.INFO)
    logging.root.addHandler(handler)

    test_path = args.test_path
    lst_path = args.test_lst

    if args.eval_gt:
        # Score the original recordings themselves (sanity baseline).
        logging.info(f"run ASR for GT: {lst_path}")
        reference, wav_file_list = get_gt_ref_texts_and_wav_files(
            args, lst_path, test_path, remove_punct, split_text
        )
        results = process_wavs(wav_file_list, batch_size=300)
    else:
        logging.info(f"run ASR for detok: {lst_path}")
        reference, gen_file_list = get_ref_texts_and_gen_files(
            args, lst_path, test_path, remove_punct, split_text
        )
        results = process_wavs(gen_file_list, batch_size=300)

    hypothesis = get_hypo_texts(args, results, remove_punct, split_text)

    assert len(hypothesis) == len(reference)
    # Typo fix in the log message: "runing" -> "running".
    logging.info(f"Finish running ASR for {lst_path}")
    logging.info(f"hypothesis: {len(hypothesis)} vs reference: {len(reference)}")

    calc_wer(reference, hypothesis, test_path)
    logging.info(f"Finish evaluate {lst_path}, results are in {args.log_file}")
|
99 |
+
|
100 |
+
|
101 |
+
if __name__ == "__main__":
    # CLI entry point: parse arguments and run the zh WER evaluation.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--test-path",
        required=True,
        type=str,
        help="folder of wav files",
    )
    parser.add_argument(
        "--test-lst",
        required=True,
        type=str,
        help="path to test file lst",
    )
    parser.add_argument(
        "--log-file",
        required=False,
        type=str,
        default=None,
        # Help-text fix: this option is the log destination, not a test list.
        help="path to the log file results are written to",
    )
    parser.add_argument(
        "--remove-punct",
        default=False,
        action="store_true",
        help="remove punct from GT and hypo texts",
    )
    parser.add_argument(
        "--norm-text",
        default=False,
        action="store_true",
        help="normalized GT and hypo texts (ignored: forced off below)",
    )
    parser.add_argument(
        "--eval-gt",
        default=False,
        action="store_true",
        # Help-text fix: previously a copy-paste of --remove-punct's help.
        help="evaluate the ground-truth recordings instead of generated ones",
    )
    args = parser.parse_args()
    # English text normalization does not apply to Chinese; force it off
    # regardless of the flag (preserved from the original behavior).
    args.norm_text = False

    main(args)
main(args)
|
evaluation/eval_sim.py
ADDED
@@ -0,0 +1,188 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright (C) 2025. Huawei Technologies Co., Ltd. All Rights Reserved. (authors: Xiao Chen)
|
2 |
+
|
3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
4 |
+
# you may not use this file except in compliance with the License.
|
5 |
+
# You may obtain a copy of the License at
|
6 |
+
|
7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
8 |
+
|
9 |
+
# Unless required by applicable law or agreed to in writing, software
|
10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
+
# See the License for the specific language governing permissions and
|
13 |
+
# limitations under the License.
|
14 |
+
|
15 |
+
import argparse
|
16 |
+
from tqdm import tqdm
|
17 |
+
import logging
|
18 |
+
import os
|
19 |
+
from verification import init_model, MODEL_LIST
|
20 |
+
import soundfile as sf
|
21 |
+
import torch
|
22 |
+
import numpy as np
|
23 |
+
import torch.nn.functional as F
|
24 |
+
from torchaudio.transforms import Resample
|
25 |
+
import torch.multiprocessing as mp
|
26 |
+
|
27 |
+
console_format = logging.Formatter(
|
28 |
+
"[%(asctime)s][%(filename)s:%(levelname)s][%(process)d:%(threadName)s]%(message)s"
|
29 |
+
)
|
30 |
+
console_handler = logging.StreamHandler()
|
31 |
+
console_handler.setFormatter(console_format)
|
32 |
+
console_handler.setLevel(logging.INFO)
|
33 |
+
if len(logging.root.handlers) > 0:
|
34 |
+
for handler in logging.root.handlers:
|
35 |
+
logging.root.removeHandler(handler)
|
36 |
+
logging.root.addHandler(console_handler)
|
37 |
+
logging.root.setLevel(logging.INFO)
|
38 |
+
|
39 |
+
|
40 |
+
MODEL_NAME = "wavlm_large"
|
41 |
+
S3PRL_PATH = os.environ.get("S3PRL_PATH")
|
42 |
+
if S3PRL_PATH is not None:
|
43 |
+
import patch_unispeech
|
44 |
+
logging.info("Applying Patches for unispeech!!!")
|
45 |
+
patch_unispeech.patch_for_npu()
|
46 |
+
|
47 |
+
|
48 |
+
def get_ref_and_gen_files(
    test_lst, test_folder, task_queue
):
    """Parse a '|'-separated test list and enqueue (ref, gen) wav path pairs.

    For every line, field 0 names the reference audio and field 2 the
    generated audio. Each is reduced to its basename stem (text before the
    first '.') and mapped into `test_folder` as <stem>_ref.wav / <stem>_gen.wav.
    """
    def _stem(path):
        # basename, truncated at the first dot
        return path.split("/")[-1].split(".")[0]

    with open(test_lst, "r") as fp:
        for raw_line in fp:
            fields = raw_line.strip().split("|")
            gen_file = f"{test_folder}/{_stem(fields[2])}_gen.wav"
            ref_file = f"{test_folder}/{_stem(fields[0])}_ref.wav"
            task_queue.put((ref_file, gen_file))

    return
|
65 |
+
|
66 |
+
|
67 |
+
def eval_speaker_similarity(model, wav1, wav2, rank):
    """Cosine similarity between speaker embeddings of two wav files.

    Each file is loaded, resampled to 16 kHz, moved to cuda:<rank>, and
    embedded with `model`; returns the scalar cosine similarity in (-1, 1).
    """
    def _prepare(path):
        # load mono samples, add a batch dim, resample to 16 kHz, move to GPU
        samples, sr = sf.read(path)
        batch = torch.from_numpy(samples).unsqueeze(0).float()
        batch = Resample(orig_freq=sr, new_freq=16000)(batch)
        return batch.cuda(f"cuda:{rank}")

    model.eval()
    with torch.no_grad():
        emb1 = model(_prepare(wav1))
        emb2 = model(_prepare(wav2))

    sim = F.cosine_similarity(emb1, emb2)
    logging.info("The similarity score between two audios is %.4f (-1.0, 1.0)." % (sim[0].item()))
    return sim[0].item()
|
89 |
+
|
90 |
+
|
91 |
+
def eval_proc(model_path, task_queue, rank, sim_list):
    """Worker process: consume (ref, gen) wav pairs from `task_queue` until a
    None sentinel, score each pair with the speaker-verification model on
    cuda:<rank>, and append (score, ref, gen) to the shared `sim_list`.

    Fixes over the original:
    - the bare `except:` wrapped `task_queue.get()` too, so a queue failure
      logged unbound `ref`/`gen` (NameError) and KeyboardInterrupt was
      swallowed; the try now covers only the per-pair scoring and catches
      Exception with a traceback.
    - removed the dead `model = None` / conditional-init dance.
    """
    assert MODEL_NAME in MODEL_LIST, 'The model_name should be in {}'.format(MODEL_LIST)
    model = init_model(MODEL_NAME, model_path)
    model.to(f"cuda:{rank}")
    while True:
        new_record = task_queue.get()
        if new_record is None:
            logging.info("FINISH processing all inputs")
            break

        ref, gen = new_record
        logging.info(f"eval SIM: {ref} v.s. {gen}")

        if not os.path.exists(ref) or not os.path.exists(gen):
            logging.info(f"MISSING: {ref} v.s. {gen}")
            continue

        try:
            # scoring one pair is best-effort; a failure must not kill the worker
            sim = eval_speaker_similarity(model, ref, gen, rank)
            sim_list.append((sim, ref, gen))
        except Exception:
            logging.exception(f"FAIL to eval SIM: {ref} v.s. {gen}")
|
117 |
+
|
118 |
+
|
119 |
+
def main(args):
    """Spawn one scoring worker per visible CUDA device, collect per-pair
    similarity scores, and return the rounded average similarity.

    Fixes over the original:
    - `--log-file` defaults to None and `logging.FileHandler(filename=None)`
      raises; the handler is now only installed when a path is given.
    - `np.mean` over an empty score list returned nan with a RuntimeWarning;
      an explicit empty guard returns 0.0.
    """
    if args.log_file is not None:
        handler = logging.FileHandler(filename=args.log_file, mode="w")
        logging.root.addHandler(handler)

    device_list = [0]
    if "CUDA_VISIBLE_DEVICES" in os.environ:
        device_list = [int(x.strip()) for x in os.environ["CUDA_VISIBLE_DEVICES"].split(",")]

    logging.info(f"Using devices: {device_list}")
    n_procs = len(device_list)
    ctx = mp.get_context('spawn')
    with ctx.Manager() as manager:
        sim_list = manager.list()
        task_queue = manager.Queue()
        get_ref_and_gen_files(args.test_lst, args.test_path, task_queue)

        processes = []
        for idx in range(n_procs):
            # one None sentinel per worker so every process terminates
            task_queue.put(None)
            # with CUDA_VISIBLE_DEVICES set, torch remaps devices to 0..n-1,
            # so the worker rank is the index, not the raw device id
            rank = idx
            p = ctx.Process(target=eval_proc, args=(args.model_path, task_queue, rank, sim_list))
            processes.append(p)

        for proc in processes:
            proc.start()

        for proc in processes:
            proc.join()

        # read results while the manager is still alive
        sim_scores = []
        for sim, ref, gen in sim_list:
            logging.info(f"{ref} vs {gen} : {sim}")
            sim_scores.append(sim)

        if not sim_scores:
            # avoid np.mean([]) -> nan with a RuntimeWarning
            logging.info("total evaluated wav pairs: 0")
            return 0.0

        avg_sim = round(np.mean(np.array(sim_scores)), 3)
        logging.info("total evaluated wav pairs: %d" % (len(sim_list)))
        logging.info("The average similarity score of %s is %.4f (-1.0, 1.0)." % (args.test_path, avg_sim))
        return avg_sim
|
156 |
+
|
157 |
+
|
158 |
+
if __name__ == "__main__":

    parser = argparse.ArgumentParser()
    # folder containing the *_ref.wav / *_gen.wav files
    parser.add_argument("--test-path", required=True, type=str, help="folder of wav files")
    # '|'-separated list describing ref/gen pairs
    parser.add_argument("--test-lst", required=True, type=str, help="path to test file lst")
    # optional extra log sink; console logging is always active
    parser.add_argument("--log-file", required=False, type=str, default=None, help="path to test file lst")
    parser.add_argument("--model-path", type=str, default="./wavlm-sv", help="path to sv model")

    args = parser.parse_args()
    main(args)
|
evaluation/patch_unispeech.py
ADDED
@@ -0,0 +1,131 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright (C) 2025. Huawei Technologies Co., Ltd. All Rights Reserved. (authors: Xiao Chen)
|
2 |
+
|
3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
4 |
+
# you may not use this file except in compliance with the License.
|
5 |
+
# You may obtain a copy of the License at
|
6 |
+
|
7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
8 |
+
|
9 |
+
# Unless required by applicable law or agreed to in writing, software
|
10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
+
# See the License for the specific language governing permissions and
|
13 |
+
# limitations under the License.
|
14 |
+
|
15 |
+
from patch_utils import MindSpeedPatchesManager as aspm
|
16 |
+
import os
|
17 |
+
import torch
|
18 |
+
import torch.nn as nn
|
19 |
+
import logging
|
20 |
+
import torchaudio.transforms as trans
|
21 |
+
from s3prl.upstream.wavlm.expert import UpstreamExpert as s3prl_UpstreamExpert
|
22 |
+
from models.ecapa_tdnn import Conv1dReluBn, SE_Res2Block, AttentiveStatsPool
|
23 |
+
from models.ecapa_tdnn import ECAPA_TDNN_SMALL, ECAPA_TDNN
|
24 |
+
|
25 |
+
def init_model_patched(model_name, checkpoint=None):
    """Build an ECAPA_TDNN_SMALL speaker-verification model for the requested
    upstream feature extractor, optionally loading weights from `checkpoint`.

    Replacement for `verification.init_model`; `wavlm_large` reads its s3prl
    config path from the S3PRL_PATH environment variable.
    """
    S3PRL_PATH = os.environ.get("S3PRL_PATH")
    # model_name -> (feat_dim, feat_type, config_path)
    presets = {
        'unispeech_sat': (1024, 'unispeech_sat', 'config/unispeech_sat.th'),
        'wavlm_base_plus': (768, 'wavlm_base_plus', None),
        'wavlm_large': (1024, 'wavlm_large', S3PRL_PATH),
        'hubert_large': (1024, 'hubert_large_ll60k', None),
        'wav2vec2_xlsr': (1024, 'wav2vec2_xlsr', None),
    }
    if model_name in presets:
        feat_dim, feat_type, config_path = presets[model_name]
        model = ECAPA_TDNN_SMALL(feat_dim=feat_dim, feat_type=feat_type, config_path=config_path)
    else:
        # unknown names fall back to plain fbank features
        model = ECAPA_TDNN_SMALL(feat_dim=40, feat_type='fbank')

    if checkpoint is not None:
        state_dict = torch.load(checkpoint, map_location=lambda storage, loc: storage)
        model.load_state_dict(state_dict['model'], strict=False)
    return model
|
49 |
+
|
50 |
+
|
51 |
+
class patched_ECAPA_TDNN(ECAPA_TDNN):
    """ECAPA-TDNN with a rewritten __init__ for NPU compatibility.

    Differs from the upstream class by loading the s3prl upstream expert
    directly from a local config path and by forcing `fp32_attention` off on
    two encoder layers (NOTE(review): presumably because fp32 attention is
    unsupported/slow on NPU — confirm against the NPU runtime docs).
    """

    def __init__(self, feat_dim=80, channels=512, emb_dim=192, global_context_att=False,
                 feat_type='fbank', sr=16000, feature_selection="hidden_states", update_extract=False, config_path=None):
        # Skip ECAPA_TDNN.__init__ on purpose: this method rebuilds every
        # attribute itself, so only nn.Module's __init__ is run.
        super(ECAPA_TDNN, self).__init__()

        self.feat_type = feat_type
        self.feature_selection = feature_selection
        self.update_extract = update_extract
        self.sr = sr

        # Handcrafted features have no pretrained extractor to fine-tune.
        if feat_type == "fbank" or feat_type == "mfcc":
            self.update_extract = False

        # 25 ms window / 10 ms hop, expressed in samples.
        win_len = int(sr * 0.025)
        hop_len = int(sr * 0.01)

        if feat_type == 'fbank':
            self.feature_extract = trans.MelSpectrogram(sample_rate=sr, n_fft=512, win_length=win_len,
                                                        hop_length=hop_len, f_min=0.0, f_max=sr // 2,
                                                        pad=0, n_mels=feat_dim)
        elif feat_type == 'mfcc':
            melkwargs = {
                'n_fft': 512,
                'win_length': win_len,
                'hop_length': hop_len,
                'f_min': 0.0,
                'f_max': sr // 2,
                'pad': 0
            }
            self.feature_extract = trans.MFCC(sample_rate=sr, n_mfcc=feat_dim, log_mels=False,
                                              melkwargs=melkwargs)
        else:
            # Pretrained upstream: fetch via torch.hub, or build the s3prl
            # expert from a local config when config_path is provided.
            if config_path is None:
                self.feature_extract = torch.hub.load('s3prl/s3prl', feat_type)
            else:
                self.feature_extract = s3prl_UpstreamExpert(config_path)
            # Disable fp32 attention on layers 23 and 11 of 24-layer encoders
            # (NOTE(review): the specific layer choice mirrors the upstream
            # UniSpeech eval script — verify if the model depth changes).
            if len(self.feature_extract.model.encoder.layers) == 24 and hasattr(self.feature_extract.model.encoder.layers[23].self_attn, "fp32_attention"):
                self.feature_extract.model.encoder.layers[23].self_attn.fp32_attention = False
            if len(self.feature_extract.model.encoder.layers) == 24 and hasattr(self.feature_extract.model.encoder.layers[11].self_attn, "fp32_attention"):
                self.feature_extract.model.encoder.layers[11].self_attn.fp32_attention = False

        # Learnable weights for mixing the upstream's hidden-state layers.
        self.feat_num = self.get_feat_num()
        self.feature_weight = nn.Parameter(torch.zeros(self.feat_num))

        if feat_type != 'fbank' and feat_type != 'mfcc':
            # Always freeze pretraining-only heads of the upstream model.
            freeze_list = ['final_proj', 'label_embs_concat', 'mask_emb', 'project_q', 'quantizer']
            for name, param in self.feature_extract.named_parameters():
                for freeze_val in freeze_list:
                    if freeze_val in name:
                        param.requires_grad = False
                        break

        if not self.update_extract:
            # Freeze the whole extractor unless fine-tuning was requested.
            for param in self.feature_extract.parameters():
                param.requires_grad = False

        self.instance_norm = nn.InstanceNorm1d(feat_dim)
        # self.channels = [channels] * 4 + [channels * 3]
        self.channels = [channels] * 4 + [1536]

        # Frame-level TDNN trunk: one conv front-end + three dilated
        # SE-Res2 blocks with increasing dilation.
        self.layer1 = Conv1dReluBn(feat_dim, self.channels[0], kernel_size=5, padding=2)
        self.layer2 = SE_Res2Block(self.channels[0], self.channels[1], kernel_size=3, stride=1, padding=2, dilation=2, scale=8, se_bottleneck_dim=128)
        self.layer3 = SE_Res2Block(self.channels[1], self.channels[2], kernel_size=3, stride=1, padding=3, dilation=3, scale=8, se_bottleneck_dim=128)
        self.layer4 = SE_Res2Block(self.channels[2], self.channels[3], kernel_size=3, stride=1, padding=4, dilation=4, scale=8, se_bottleneck_dim=128)

        # self.conv = nn.Conv1d(self.channels[-1], self.channels[-1], kernel_size=1)
        # 1x1 conv over the concatenation of the three SE-Res2 block outputs.
        cat_channels = channels * 3
        self.conv = nn.Conv1d(cat_channels, self.channels[-1], kernel_size=1)
        self.pooling = AttentiveStatsPool(self.channels[-1], attention_channels=128, global_context_att=global_context_att)
        self.bn = nn.BatchNorm1d(self.channels[-1] * 2)
        self.linear = nn.Linear(self.channels[-1] * 2, emb_dim)
|
122 |
+
|
123 |
+
|
124 |
+
def patched_ECAPA_TDNN_SMALL(feat_dim, emb_dim=256, feat_type='fbank', sr=16000, feature_selection="hidden_states", update_extract=False, config_path=None):
    """Factory for the NPU-patched ECAPA-TDNN in the standard 512-channel 'small' layout."""
    model = patched_ECAPA_TDNN(
        feat_dim=feat_dim,
        channels=512,
        emb_dim=emb_dim,
        feat_type=feat_type,
        sr=sr,
        feature_selection=feature_selection,
        update_extract=update_extract,
        config_path=config_path,
    )
    return model
|
127 |
+
|
128 |
+
def patch_for_npu():
    """Register and apply the NPU replacements for the upstream sv entry points."""
    replacements = (
        ('models.ecapa_tdnn.ECAPA_TDNN_SMALL', patched_ECAPA_TDNN_SMALL),
        ('verification.init_model', init_model_patched),
    )
    for target, replacement in replacements:
        aspm.register_patch(target, replacement)
    aspm.apply_patches()
|
evaluation/patch_utils.py
ADDED
@@ -0,0 +1,122 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# copied from https://gitee.com/ascend/MindSpeed/blob/master/mindspeed/patch_utils.py
|
2 |
+
|
3 |
+
import importlib
|
4 |
+
import sys
|
5 |
+
import types
|
6 |
+
|
7 |
+
|
8 |
+
def get_func_name(func):
    """Return the fully qualified name of *func*; strings pass through unchanged."""
    if not isinstance(func, str):
        func = '.'.join((func.__module__, func.__qualname__))
    return func
|
12 |
+
|
13 |
+
|
14 |
+
def dummy_function_wrapper(func_name):
    """Create a placeholder that raises RuntimeError when invoked, standing in
    for a patch target that could not be resolved."""
    message = 'function {} no exist'.format(func_name)

    def dummy_function(*args, **kwargs):
        raise RuntimeError(message)

    return dummy_function
|
19 |
+
|
20 |
+
|
21 |
+
class Patch:
    """One pending monkey-patch: resolves a dotted target path and swaps in a
    replacement function (plus optional decorator-style wrappers) across all
    already-imported modules that reference the original object."""

    def __init__(self, orig_func_name, new_func, create_dummy):
        # "pkg.mod.func" -> module part + attribute part; a bare name with no
        # dot means the whole module itself is the target (attribute is None).
        split_name = orig_func_name.rsplit('.', 1)
        if len(split_name) == 1:
            self.orig_module_name, self.orig_func_name = orig_func_name, None
        else:
            self.orig_module_name, self.orig_func_name = split_name
        self.orig_module = None   # resolved lazily in apply_patch()
        self.orig_func = None     # original attribute, captured before patching

        self.patch_func = None    # the single replacement callable
        self.wrappers = []        # decorator wrappers, applied in registration order
        if new_func is None:
            # no replacement supplied: install a stub that raises when called
            new_func = dummy_function_wrapper(orig_func_name)
        self.set_patch_func(new_func)
        self.is_applied = False
        self.create_dummy = create_dummy

    @property
    def orig_func_id(self):
        # identity of the original object; used to find aliased references
        return id(self.orig_func)

    @property
    def patch_func_id(self):
        return id(self.patch_func)

    def set_patch_func(self, new_func, force_patch=False):
        """Record *new_func* as replacement (or as a wrapper, by naming convention).

        Functions whose __name__ ends in 'wrapper'/'decorator' are stacked as
        decorators; anything else becomes the single replacement, and a second
        replacement raises unless force_patch is set.
        """
        if hasattr(new_func, '__name__') and new_func.__name__.endswith(('wrapper', 'decorator')):
            self.wrappers.append(new_func)
        else:
            if self.patch_func and not force_patch:
                raise RuntimeError('the patch of {} exist !'.format(self.orig_func_name))
            self.patch_func = new_func
        # any change invalidates a previous application
        self.is_applied = False

    def apply_patch(self):
        """Resolve the target and install the (wrapped) replacement; idempotent."""
        if self.is_applied:
            return

        self.orig_module, self.orig_func = Patch.parse_path(self.orig_module_name, self.orig_func_name, self.create_dummy)

        # wrappers decorate the replacement if one exists, else the original
        final_patch_func = self.orig_func
        if self.patch_func is not None:
            final_patch_func = self.patch_func

        for wrapper in self.wrappers:
            final_patch_func = wrapper(final_patch_func)

        if self.orig_func_name is not None:
            setattr(self.orig_module, self.orig_func_name, final_patch_func)
        # Rebind any other loaded module holding a direct alias of the original
        # object (e.g. created via `from mod import func`).
        for key, value in sys.modules.copy().items():
            if self.orig_func_name is not None and hasattr(value, self.orig_func_name) \
                    and id(getattr(value, self.orig_func_name)) == self.orig_func_id:
                setattr(value, self.orig_func_name, final_patch_func)
        self.is_applied = True

    @staticmethod
    def parse_path(module_path, function_name, create_dummy):
        """Import `module_path` piecewise and return (module, attribute).

        If an intermediate module is missing and create_dummy is true, a
        placeholder module is fabricated in sys.modules so patching can
        proceed; otherwise the import error propagates. When the target name
        resolves to an attribute of the parent (not a module), that attribute
        is returned directly.
        """
        from importlib.machinery import ModuleSpec
        modules = module_path.split('.')
        for i in range(1, len(modules) + 1):
            parent = '.'.join(modules[:i - 1])
            path = '.'.join(modules[:i])
            try:
                importlib.import_module(path)
            except ModuleNotFoundError as e:
                if not parent or not hasattr(importlib.import_module(parent), modules[i - 1]):
                    if not create_dummy:
                        raise ModuleNotFoundError(e) from e
                    # fabricate an empty placeholder module
                    sys.modules[path] = types.ModuleType(path)
                    sys.modules[path].__file__ = 'mindspeed.dummy_module.py'
                    sys.modules[path].__spec__ = ModuleSpec(path, None)
                    if parent:
                        setattr(importlib.import_module(parent), modules[i - 1], sys.modules[path])
                else:
                    # the segment exists as an attribute of its parent, not a module
                    module = getattr(importlib.import_module(parent), modules[i - 1])
                    if hasattr(module, function_name):
                        return module, getattr(module, function_name)
                    elif create_dummy:
                        return module, dummy_function_wrapper(function_name)
                    else:
                        raise RuntimeError('no exist {} of {}'.format(function_name, module))

        if function_name is not None and not hasattr(sys.modules[module_path], function_name):
            # ensure the attribute exists so the getattr below cannot fail
            setattr(sys.modules[module_path], function_name, None)
        return sys.modules[module_path], getattr(sys.modules[module_path], function_name) if function_name is not None else None
|
107 |
+
|
108 |
+
|
109 |
+
class MindSpeedPatchesManager:
    """Registry of pending patches keyed by the target's dotted name."""

    patches_info = {}

    @staticmethod
    def register_patch(orig_func_name, new_func=None, force_patch=False, create_dummy=False):
        """Record a replacement for `orig_func_name`, merging into an existing entry if present."""
        registry = MindSpeedPatchesManager.patches_info
        entry = registry.get(orig_func_name)
        if entry is None:
            registry[orig_func_name] = Patch(orig_func_name, new_func, create_dummy)
        else:
            entry.set_patch_func(new_func, force_patch)

    @staticmethod
    def apply_patches():
        """Apply every registered patch (each application is idempotent)."""
        for pending in MindSpeedPatchesManager.patches_info.values():
            pending.apply_patch()
|
evaluation/requirements_sim.txt
ADDED
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
fire
|
2 |
+
omegaconf==2.0.6
|
3 |
+
s3prl==0.4.1
|
4 |
+
numpy==1.21
|
5 |
+
fairseq==0.12.2
|
6 |
+
torch==2.1.0
|
7 |
+
torchaudio==2.1.0
|
evaluation/requirements_wer.txt
ADDED
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#fire
|
2 |
+
#s3prl==0.4.1
|
3 |
+
#hydra-core==1.0.7
|
4 |
+
#fairseq==0.12.2
|
5 |
+
transformers
|
6 |
+
funasr==1.2.7
|
7 |
+
zhconv
|
8 |
+
zhon
|
9 |
+
whisper_normalizer
|
10 |
+
torch==2.1.0
|
11 |
+
torchaudio==2.1.0
|
12 |
+
lingvo
|
figs/CADiT.jpg
ADDED
![]() |
Git LFS Details
|
figs/F5-streaming.jpg
ADDED
![]() |
Git LFS Details
|
figs/TTS.jpg
ADDED
![]() |
Git LFS Details
|
figs/eval1.jpg
ADDED
![]() |
Git LFS Details
|
figs/eval2.jpg
ADDED
![]() |
Git LFS Details
|
figs/eval3.jpg
ADDED
![]() |
Git LFS Details
|
figs/reconstruction.jpg
ADDED
![]() |
Git LFS Details
|
figs/tokenizer.jpg
ADDED
![]() |
Git LFS Details
|
install_requirements.sh
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# to resolve deps conflicts of fairseq and f5-tts
|
2 |
+
pip install pip==23.1.2
|
3 |
+
pip install setuptools==65.5.1
|
4 |
+
pip install f5-tts==0.3.4
|
5 |
+
pip install -r requirements_npu.txt
|
reconstuction_example.py
ADDED
@@ -0,0 +1,178 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright (C) 2025. Huawei Technologies Co., Ltd. All Rights Reserved. (authors: Xiao Chen)
|
2 |
+
|
3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
4 |
+
# you may not use this file except in compliance with the License.
|
5 |
+
# You may obtain a copy of the License at
|
6 |
+
|
7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
8 |
+
|
9 |
+
# Unless required by applicable law or agreed to in writing, software
|
10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
+
# See the License for the specific language governing permissions and
|
13 |
+
# limitations under the License.
|
14 |
+
|
15 |
+
|
16 |
+
import argparse
|
17 |
+
import librosa
|
18 |
+
import logging
|
19 |
+
import soundfile as sf
|
20 |
+
import sys
|
21 |
+
from pathlib import Path
|
22 |
+
|
23 |
+
|
24 |
+
sub_modules = ["", "semantic_tokenizer/f40ms", "semantic_detokenizer"]
|
25 |
+
for sub in sub_modules:
|
26 |
+
sys.path.append(str((Path(__file__).parent / sub).absolute()))
|
27 |
+
|
28 |
+
from semantic_tokenizer.f40ms.simple_tokenizer_infer import SpeechTokenizer, TOKENIZER_CFG_NAME
|
29 |
+
from semantic_detokenizer.chunk_infer import SpeechDetokenizer
|
30 |
+
|
31 |
+
|
32 |
+
class ReconstructionPipeline:
    """Speech reconstruction pipeline: tokenize a wav into semantic units and
    re-synthesize it with the chunked detokenizer, using a reference wav to
    provide the target voice."""

    def __init__(
        self,
        detok_vocoder: str,
        tokenizer_cfg_name: str = TOKENIZER_CFG_NAME,
        tokenizer_cfg_path: str = str(
            (Path(__file__).parent / "semantic_tokenizer/f40ms/config").absolute()
        ),
        tokenizer_ckpt: str = str(
            (
                Path(__file__).parent / "semantic_tokenizer/f40ms/ckpt/model.pt"
            ).absolute()
        ),
        detok_model_cfg: str = str(
            (Path(__file__).parent / "semantic_detokenizer/ckpt/model.yaml").absolute()
        ),
        detok_ckpt: str = str(
            (Path(__file__).parent / "semantic_detokenizer/ckpt/model.pt").absolute()
        ),
        detok_vocab: str = str(
            (
                Path(__file__).parent / "semantic_detokenizer/ckpt/vocab_4096.txt"
            ).absolute()
        ),
    ):
        """Load the tokenizer and detokenizer; only the vocoder path is required,
        all other paths default to checkpoints shipped alongside this repo."""
        self.tokenizer_cfg_name = tokenizer_cfg_name
        self.tokenizer = SpeechTokenizer(
            ckpt_path=tokenizer_ckpt,
            cfg_path=tokenizer_cfg_path,
            cfg_name=self.tokenizer_cfg_name,
        )

        # single-device pipeline; NOTE(review): hard-coded to the first GPU
        self.device = "cuda:0"
        self.detoker = SpeechDetokenizer(
            vocoder_path=detok_vocoder,
            model_cfg=detok_model_cfg,
            ckpt_file=detok_ckpt,
            vocab_file=detok_vocab,
            device=self.device,
        )

        # chunked-generation hyperparameters passed to chunk_generate()
        self.token_chunk_len = 75           # tokens per synthesis chunk
        self.chunk_cond_proportion = 0.3    # fraction of a chunk used as conditioning
        self.chunk_look_ahead = 10          # tokens of look-ahead across chunks
        self.max_ref_duration = 4.5         # seconds of reference audio kept
        self.ref_audio_cut_from_head = False  # trim ref audio from tail, not head

    def reconstruct(self, ref_wav, input_wav):
        """Tokenize `ref_wav` and `input_wav` (both loaded at 16 kHz) and
        regenerate the input in the reference speaker's voice.

        Returns (waveform, sample_rate), or (None, None) if generation failed.
        """
        ref_wavs_list = []
        raw_ref_wav, sr = librosa.load(ref_wav, sr=16000)
        ref_wavs_list.append(raw_ref_wav)

        raw_input_wav, sr = librosa.load(input_wav, sr=16000)
        ref_wavs_list.append(raw_input_wav)

        # batch-extract semantic tokens for both wavs in one call
        token_list, token_info_list = self.tokenizer.extract(
            ref_wavs_list
        )
        ref_tokens = token_info_list[0]["reduced_unit_sequence"]
        input_tokens = token_info_list[1]["reduced_unit_sequence"]
        logging.info("tokens for ref wav: %s are [%s]" % (ref_wav, ref_tokens))
        logging.info("tokens for input wav: %s are [%s]" % (input_wav, input_tokens))

        generated_wave, target_sample_rate = self.detoker.chunk_generate(
            ref_wav,
            ref_tokens.split(),
            input_tokens.split(),
            self.token_chunk_len,
            self.chunk_cond_proportion,
            self.chunk_look_ahead,
            self.max_ref_duration,
            self.ref_audio_cut_from_head,
        )

        if generated_wave is None:
            logging.info("generation FAILED")
            return None, None
        return generated_wave, target_sample_rate
|
110 |
+
|
111 |
+
|
112 |
+
def main(args):
    """Build the reconstruction pipeline and write the reconstructed wav.

    Fix over the original: `reconstruct()` returns (None, None) on failure,
    and passing None to `sf.write` would crash — the failure case is now
    detected and reported instead of writing a broken file.
    (Also renames the misspelled local `reconsturctor`.)
    """
    reconstructor = ReconstructionPipeline(
        detok_vocoder=args.detok_vocoder,
    )

    generated_wave, target_sample_rate = reconstructor.reconstruct(args.ref_wav, args.input_wav)
    if generated_wave is None:
        logging.error("reconstruction produced no audio; nothing written to %s", args.output_wav)
        return

    with open(args.output_wav, "wb") as f:
        sf.write(f.name, generated_wave, target_sample_rate)
        logging.info(f"write output to: {f.name}")

    logging.info("Finished")
    return
|
125 |
+
|
126 |
+
|
127 |
+
if __name__ == "__main__":
    parser = argparse.ArgumentParser()

    # (flag, required, default, help)
    arg_specs = [
        ("--tokenizer-ckpt", False, None, "path to ckpt"),
        ("--tokenizer-cfg-path", False, "semantic_tokenizer/f40ms/config", "path to config"),
        ("--detok-ckpt", False, None, "path to ckpt"),
        ("--detok-model-cfg", False, None, "path to model_cfg"),
        ("--detok-vocab", False, None, "path to vocab"),
        ("--detok-vocoder", True, None, "path to vocoder"),
        ("--ref-wav", True, None, "path to ref wav"),
        ("--output-wav", True, None, "path to output reconstructed wav"),
        ("--input-wav", True, None, "input wav to reconstruction"),
    ]
    for flag, required, default, help_text in arg_specs:
        parser.add_argument(flag, required=required, default=default, help=help_text)

    args = parser.parse_args()

    main(args)
|
requirements_npu.txt
ADDED
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
librosa==0.9.2
|
2 |
+
torch==2.5.1
|
3 |
+
torch-npu==2.5.1
|
4 |
+
torchaudio==2.5.1
|
5 |
+
torchvision==0.20.1
|
6 |
+
omegaconf==2.0.6
|
7 |
+
fairseq==0.12.2
|
8 |
+
Cython
|
9 |
+
numpy==1.23.4
|
10 |
+
g2p_en
|
11 |
+
jieba_fast
|
12 |
+
jieba
|
13 |
+
LangSegment>=0.2.0
|
14 |
+
wordsegment
|
15 |
+
pypinyin
|
16 |
+
cn2an
|
17 |
+
g2pM
|
18 |
+
WeTextProcessing
|
semantic_detokenizer/__init__.py
ADDED
File without changes
|
semantic_detokenizer/chunk_infer.py
ADDED
@@ -0,0 +1,478 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright (C) 2025. Huawei Technologies Co., Ltd. All Rights Reserved. (authors: Dehua Tao,
|
2 |
+
# Xiao Chen)
|
3 |
+
|
4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
5 |
+
# you may not use this file except in compliance with the License.
|
6 |
+
# You may obtain a copy of the License at
|
7 |
+
|
8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
9 |
+
|
10 |
+
# Unless required by applicable law or agreed to in writing, software
|
11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
13 |
+
# See the License for the specific language governing permissions and
|
14 |
+
# limitations under the License.
|
15 |
+
|
16 |
+
|
17 |
+
import argparse
|
18 |
+
import os
|
19 |
+
from datetime import datetime
|
20 |
+
from importlib.resources import files
|
21 |
+
from pathlib import Path
|
22 |
+
import tqdm
|
23 |
+
|
24 |
+
import soundfile as sf
|
25 |
+
import time
|
26 |
+
|
27 |
+
from omegaconf import OmegaConf
|
28 |
+
import torchaudio
|
29 |
+
import torch.multiprocessing as mp
|
30 |
+
|
31 |
+
from f5_tts.infer.utils_infer import (
|
32 |
+
load_model,
|
33 |
+
load_vocoder,
|
34 |
+
remove_silence_for_generated_wav,
|
35 |
+
)
|
36 |
+
from utils_infer import (
|
37 |
+
mel_spec_type,
|
38 |
+
target_rms,
|
39 |
+
nfe_step,
|
40 |
+
cfg_strength,
|
41 |
+
sway_sampling_coef,
|
42 |
+
speed,
|
43 |
+
fix_duration,
|
44 |
+
chunk_infer_batch_process
|
45 |
+
)
|
46 |
+
from model.cadit import CADiT
|
47 |
+
|
48 |
+
import logging
|
49 |
+
# Route all root logging through a single stream handler with a rich format.
console_format = logging.Formatter(
    "[%(asctime)s][%(filename)s:%(levelname)s][%(process)d:%(threadName)s]%(message)s"
)
console_handler = logging.StreamHandler()
console_handler.setFormatter(console_format)
console_handler.setLevel(logging.INFO)
# Drop any handlers installed by imported libraries before adding ours.
for handler in list(logging.root.handlers):
    logging.root.removeHandler(handler)
logging.root.addHandler(console_handler)
logging.root.setLevel(logging.INFO)


# Opt-in NPU support: TOKENIZE_ON_NPU=1 activates the torch_npu backend
# and the F5-TTS compatibility patches.
TOKENIZE_ON_NPU = os.environ.get("TOKENIZE_ON_NPU")
if TOKENIZE_ON_NPU == "1":
    import torch_npu
    import f5tts_npu_patch
    from torch_npu.contrib import transfer_to_npu

    logging.info("Applying Patches for NPU!!!")
    f5tts_npu_patch.patch_for_npu()
|
70 |
+
|
71 |
+
|
72 |
+
class SpeechDetokenizer:
|
73 |
+
    def __init__(self,
                 vocoder_path:str,
                 model_cfg:str = str((Path(__file__).parent / "ckpt/model.yaml").absolute()),
                 ckpt_file:str = str((Path(__file__).parent / "ckpt/model.pt").absolute()),
                 vocab_file:str = str((Path(__file__).parent / "ckpt/vocab_4096.txt").absolute()),
                 device="cuda:0"):
        """Semantic-token detokenizer built on the CADiT TTS model.

        Args:
            vocoder_path: local directory of the mel vocoder (required).
            model_cfg: model architecture yaml; defaults to the bundled ckpt.
            ckpt_file: CADiT checkpoint; defaults to the bundled ckpt.
            vocab_file: token vocabulary file; defaults to the bundled vocab.
            device: torch device string the models are loaded onto.
        """
        self.model_cfg = model_cfg
        self.ckpt_file = ckpt_file
        self.vocab_file = vocab_file
        self.vocoder_path = vocoder_path
        self.device = device

        # no cross-fading between generated chunks by default
        self.cross_fade_duration = 0
        # eagerly load vocoder + TTS model
        self.initialize()
|
87 |
+
|
88 |
+
def initialize(self):
|
89 |
+
self.model = "CADiT" # "F5TTS"
|
90 |
+
load_vocoder_from_local = True
|
91 |
+
|
92 |
+
self.vocoder_name = mel_spec_type
|
93 |
+
# Vocoder
|
94 |
+
vocoder_local_path = self.vocoder_path # "/home/ma-user/work/chenxiao/workspace/model/vocos_mel_24khz/"
|
95 |
+
|
96 |
+
# TTS model
|
97 |
+
model_cls = CADiT
|
98 |
+
model_cfg = OmegaConf.load(self.model_cfg).model.arch
|
99 |
+
logging.info(f"Using {self.model}...")
|
100 |
+
|
101 |
+
# Load vocoder
|
102 |
+
self.vocoder = load_vocoder(
|
103 |
+
vocoder_name=self.vocoder_name,
|
104 |
+
is_local=load_vocoder_from_local,
|
105 |
+
local_path=vocoder_local_path,
|
106 |
+
device=self.device,
|
107 |
+
)
|
108 |
+
|
109 |
+
# Load TTS model
|
110 |
+
self.ema_model = load_model(
|
111 |
+
model_cls,
|
112 |
+
model_cfg,
|
113 |
+
self.ckpt_file,
|
114 |
+
mel_spec_type=self.vocoder_name,
|
115 |
+
vocab_file=self.vocab_file,
|
116 |
+
device=self.device,
|
117 |
+
)
|
118 |
+
|
119 |
+
# def chunk_text(self, text_list, chunk_len=135, merge_short_last=False):
|
120 |
+
# """
|
121 |
+
# Splits the input text into chunks, each with a maximum number of characters.
|
122 |
+
|
123 |
+
# Args:
|
124 |
+
# text (str): The text to be split.
|
125 |
+
# max_chars (int): The maximum number of characters per chunk.
|
126 |
+
|
127 |
+
# Returns:
|
128 |
+
# List[str]: A list of text chunks.
|
129 |
+
# """
|
130 |
+
# chunks = []
|
131 |
+
|
132 |
+
# # if isinstance(text, list):
|
133 |
+
# for i in range(0, len(text_list), chunk_len):
|
134 |
+
# chunks.append(text_list[i : i + chunk_len])
|
135 |
+
|
136 |
+
# if merge_short_last and len(chunks) >= 2 and len(chunks[-1]) < chunk_len:
|
137 |
+
# # Merge the last two chunks
|
138 |
+
# last = chunks.pop()
|
139 |
+
# second_last = chunks.pop()
|
140 |
+
# chunks.append(second_last + last)
|
141 |
+
|
142 |
+
# return chunks
|
143 |
+
|
144 |
+
|
145 |
+
def chunk_text_with_look_ahead(
|
146 |
+
self, text_list, chunk_look_ahead_len, chunk_len=135, merge_short_last=False
|
147 |
+
):
|
148 |
+
chunks = []
|
149 |
+
|
150 |
+
stride = chunk_len - chunk_look_ahead_len
|
151 |
+
for i in range(0, len(text_list), stride):
|
152 |
+
chk = text_list[i : i + chunk_len]
|
153 |
+
chunks.append(chk)
|
154 |
+
if i + chunk_len >= len(text_list):
|
155 |
+
break
|
156 |
+
|
157 |
+
if (
|
158 |
+
merge_short_last
|
159 |
+
and len(chunks) >= 2
|
160 |
+
and len(chunks[-1]) < stride # chunk_len * 4 / 5
|
161 |
+
):
|
162 |
+
# Merge the last two chunks
|
163 |
+
last = chunks.pop()
|
164 |
+
second_last = chunks.pop()
|
165 |
+
if chunk_look_ahead_len <= 0:
|
166 |
+
chunks.append(second_last + last)
|
167 |
+
else:
|
168 |
+
chunks.append(second_last[:-chunk_look_ahead_len] + last)
|
169 |
+
|
170 |
+
actual_chunks = []
|
171 |
+
for idx in range(len(chunks)):
|
172 |
+
chk = chunks[idx]
|
173 |
+
if chunk_look_ahead_len <= 0:
|
174 |
+
actual_chunks.extend(chk)
|
175 |
+
else:
|
176 |
+
if idx < len(chunks) - 1:
|
177 |
+
actual_chunks.extend(chk[:-chunk_look_ahead_len])
|
178 |
+
else:
|
179 |
+
actual_chunks.extend(chk)
|
180 |
+
|
181 |
+
assert(len(actual_chunks) == len(text_list))
|
182 |
+
assert(actual_chunks == text_list)
|
183 |
+
return chunks
|
184 |
+
|
185 |
+
def chunk_generate(
|
186 |
+
self,
|
187 |
+
ref_audio,
|
188 |
+
ref_text_list,
|
189 |
+
gen_text_list,
|
190 |
+
token_chunk_len,
|
191 |
+
chunk_cond_proportion,
|
192 |
+
chunk_look_ahead_len=0,
|
193 |
+
max_ref_duration=4.5,
|
194 |
+
ref_head_cut=False,
|
195 |
+
):
|
196 |
+
|
197 |
+
gen_text_batches = self.chunk_text_with_look_ahead(
|
198 |
+
gen_text_list,
|
199 |
+
chunk_look_ahead_len,
|
200 |
+
chunk_len=token_chunk_len,
|
201 |
+
merge_short_last=True,
|
202 |
+
)
|
203 |
+
|
204 |
+
if len(gen_text_batches) == 0:
|
205 |
+
return None, None
|
206 |
+
|
207 |
+
for i, gen_text in enumerate(gen_text_batches):
|
208 |
+
logging.info(f"gen_text {i} with {len(gen_text)} tokens : {gen_text}")
|
209 |
+
|
210 |
+
audio, sr = torchaudio.load(ref_audio)
|
211 |
+
logging.info(f"Generating audio in {len(gen_text_batches)} batches...")
|
212 |
+
|
213 |
+
target_wave, target_sample_rate, combined_spectrogram = chunk_infer_batch_process(
|
214 |
+
(audio, sr),
|
215 |
+
ref_text_list,
|
216 |
+
gen_text_batches,
|
217 |
+
self.ema_model,
|
218 |
+
self.vocoder,
|
219 |
+
mel_spec_type=mel_spec_type,
|
220 |
+
progress=tqdm,
|
221 |
+
target_rms=target_rms,
|
222 |
+
cross_fade_duration=self.cross_fade_duration,
|
223 |
+
nfe_step=nfe_step,
|
224 |
+
cfg_strength=cfg_strength,
|
225 |
+
sway_sampling_coef=sway_sampling_coef,
|
226 |
+
speed=speed,
|
227 |
+
fix_duration=fix_duration,
|
228 |
+
device=self.device,
|
229 |
+
chunk_cond_proportion=chunk_cond_proportion,
|
230 |
+
chunk_look_ahead=chunk_look_ahead_len,
|
231 |
+
max_ref_duration=max_ref_duration,
|
232 |
+
ref_head_cut=ref_head_cut,
|
233 |
+
)
|
234 |
+
return target_wave, target_sample_rate
|
235 |
+
|
236 |
+
|
237 |
+
def get_audio_duration(audio_path):
    """Return the duration of an audio file in seconds."""
    waveform, rate = torchaudio.load(audio_path)
    num_frames = waveform.shape[1]
    return num_frames / rate
|
240 |
+
|
241 |
+
|
242 |
+
def get_test_list(testset_path):
    """Parse a pipe-separated testset file into inference tasks.

    Each non-empty line is ``ref_audio|ref_text[|gen_audio|gen_text[|extra]]``:

    * 2 or 3 fields: the reference is resynthesized — the task becomes
      ``[ref_text, ref_audio, ref_text]`` (generate the reference's own text).
    * 4 or 5 fields: the task becomes
      ``[ref_text, ref_audio, gen_text, gen_audio]``.

    Lines with any other field count (including blank lines) are ignored.

    Args:
        testset_path: path to the testset description file.

    Returns:
        List of ``[ref_text, ref_audio, gen_text]`` or
        ``[ref_text, ref_audio, gen_text, gen_audio]`` entries.
    """
    testset_list = []

    # Explicit encoding: token/text fields may contain non-ASCII characters.
    with open(testset_path, "r", encoding="utf-8") as f:
        for line in f:
            content = line.strip().split("|")
            if len(content) in (2, 3):
                testset_list.append([content[1], content[0], content[1]])
            elif len(content) in (4, 5):
                testset_list.append([content[1], content[0], content[3], content[2]])
    return testset_list
|
254 |
+
|
255 |
+
|
256 |
+
def infer(args, task_queue, rank=0):
    """Worker loop: pull tasks from task_queue and synthesize audio.

    Each task is ``[ref_text, ref_audio, gen_text]`` or
    ``[ref_text, ref_audio, gen_text, gen_audio]`` (see get_test_list); a
    ``None`` task is the shutdown sentinel.  Outputs are written to
    ``args.output`` as ``<stem>_ref.wav`` / ``<stem>_gen.wav`` (plus
    ``<stem>_orig.wav`` when a ground-truth audio is provided).

    Args:
        args: parsed command-line arguments (see __main__).
        task_queue: multiprocessing queue of tasks, terminated by one
            ``None`` sentinel per worker.
        rank: logical device index; the worker runs on ``f"cuda:{rank}"``
            (remapped by *_VISIBLE_DEVICES; cuda calls are redirected on NPU).
    """
    import shutil  # local import: avoids touching the (partially visible) file header

    device_spec = f"cuda:{rank}"

    if args.model_cfg is None or args.ckpt is None or args.vocab is None:
        # Fall back to the checkpoint bundle shipped with the package.
        detoker = SpeechDetokenizer(
            vocoder_path=args.vocoder,
            device=device_spec,
        )
    else:
        detoker = SpeechDetokenizer(
            vocoder_path=args.vocoder,
            model_cfg=args.model_cfg,
            ckpt_file=args.ckpt,
            vocab_file=args.vocab,
            device=device_spec,
        )

    token_chunk_len = args.chunk_token
    chunk_cond_proportion = args.chunk_cond_portion
    if chunk_cond_proportion > 1 or chunk_cond_proportion <= 0:
        chunk_cond_proportion = 0.5  # out-of-range value: use the default

    chunk_look_ahead = args.chunk_look_ahead
    if chunk_look_ahead >= token_chunk_len:
        # Look-ahead must be strictly smaller than the chunk, otherwise the
        # chunking stride would be <= 0; disable it instead.
        chunk_look_ahead = 0

    remove_silence = False

    output_dir = args.output
    if not os.path.exists(Path(output_dir)):
        os.makedirs(Path(output_dir))

    logging.info(f"infer with chunk of {token_chunk_len} tokens")
    logging.info(f"the last {chunk_cond_proportion} of each chunk added into condition")
    logging.info(f"Using the last {chunk_look_ahead} tokens as look ahead")

    gen_nums = 0
    while True:
        # Queue failure must terminate the worker.  (The original bare
        # ``except:`` around the whole body looped forever once the manager
        # connection broke, logging "Fail to get new task" endlessly.)
        try:
            _tst = task_queue.get()
        except Exception:
            logging.exception("Fail to get new task")
            break
        if _tst is None:
            logging.info("FINISH processing all inputs")
            break

        try:
            ref_text_list = _tst[0].split()
            ref_audio = _tst[1]
            gen_text_list = _tst[2].split()

            gen_audio = _tst[3] if len(_tst) == 4 else None

            ref_stem = ref_audio.split("/")[-1].split(".")[0]
            ref_wave_path = Path(output_dir) / f"{ref_stem}_ref.wav"
            if gen_audio is None:
                gen_wave_path = Path(output_dir) / f"{ref_stem}_gen.wav"
                orig_wave_path = None
            else:
                gen_stem = gen_audio.split("/")[-1].split(".")[0]
                gen_wave_path = Path(output_dir) / f"{gen_stem}_gen.wav"
                orig_wave_path = Path(output_dir) / f"{gen_stem}_orig.wav"

            if os.path.exists(gen_wave_path):
                logging.info(f"{gen_wave_path} already exist, skip")
                continue

            # shutil.copy instead of os.system("cp ..."): no shell involved,
            # so paths with spaces/metacharacters are safe, and it is portable.
            if not os.path.exists(ref_wave_path):
                shutil.copy(ref_audio, ref_wave_path)

            if gen_audio is not None and os.path.exists(gen_audio) and orig_wave_path:
                shutil.copy(gen_audio, orig_wave_path)

            generated_wave, target_sample_rate = detoker.chunk_generate(
                ref_audio,
                ref_text_list,
                gen_text_list,
                token_chunk_len,
                chunk_cond_proportion,
                chunk_look_ahead,
                args.max_ref_duration,
                args.ref_audio_cut_from_head,
            )

            if generated_wave is None:
                continue

            with open(gen_wave_path, "wb") as f:
                sf.write(f.name, generated_wave, target_sample_rate)
                # Remove silence
                if remove_silence:
                    remove_silence_for_generated_wav(f.name)
                logging.info(f"write output to: {f.name}")

            gen_nums += 1
        except Exception:
            # One bad task must not kill the worker; log with traceback
            # and move on to the next task.
            logging.exception("Failed to process task")
|
366 |
+
|
367 |
+
def run_infer_mp(args):
    """Fan inference out over all visible devices, one worker per device.

    Device visibility is read from CUDA_VISIBLE_DEVICES (GPU) or
    ASCEND_RT_VISIBLE_DEVICES (NPU); with neither set, a single worker on
    device 0 is used.  Tasks are distributed through a managed queue with one
    ``None`` shutdown sentinel per worker, and the testset manifest is copied
    next to the outputs at the end.
    """
    import shutil  # local import: avoids touching the (partially visible) file header

    device_list = [0]
    if "CUDA_VISIBLE_DEVICES" in os.environ:
        device_list = [int(x.strip()) for x in os.environ["CUDA_VISIBLE_DEVICES"].split(",")]
    elif "ASCEND_RT_VISIBLE_DEVICES" in os.environ:
        device_list = [int(x.strip()) for x in os.environ["ASCEND_RT_VISIBLE_DEVICES"].split(",")]

    logging.info(f"Using devices: {device_list}")
    n_procs = len(device_list)

    # load testset
    testset_list = get_test_list(args.testset_path)

    ctx = mp.get_context("spawn")
    with ctx.Manager() as manager:
        task_queue = manager.Queue()
        for task in testset_list:
            task_queue.put(task)

        processes = []
        for idx in range(n_procs):
            task_queue.put(None)  # one shutdown sentinel per worker
            rank = idx  # logical index; *_VISIBLE_DEVICES remaps to physical
            # Fix: use ctx.Process, not mp.Process — the spawn context was
            # created precisely because CUDA/NPU cannot be (re)initialized in
            # forked children, but the original launched workers with the
            # default start method anyway.
            p = ctx.Process(target=infer, args=(args, task_queue, rank))
            p.start()
            processes.append(p)

        for p in processes:
            p.join()

    # shutil.copy instead of os.system("cp ..."): shell-free and portable.
    shutil.copy(args.testset_path, args.output)
    logging.info(f"Finish processing of {n_procs}")
|
400 |
+
|
401 |
+
|
402 |
+
if __name__ == "__main__":
    # Command-line entry point: parse arguments, run multi-process inference
    # over the testset, and report wall-clock time.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--ckpt",
        required=False,
        help="path to ckpt",
    )
    parser.add_argument(
        "--model-cfg",
        required=False,
        help="path to model_cfg",
    )
    parser.add_argument(
        "--vocab",
        required=False,
        help="path to vocab",
    )
    # --ckpt/--model-cfg/--vocab are optional as a group: if any is missing,
    # infer() falls back to the checkpoint bundled with the package.
    parser.add_argument(
        "--vocoder",
        required=True,
        help="path to vocoder",
    )
    parser.add_argument(
        "--testset",
        dest="testset_path",
        required=True,
        help="path of testset file",
    )
    parser.add_argument(
        "--output",
        required=True,
        help="path to output generated audio",
    )
    parser.add_argument(
        "--chunk-token",
        required=True,
        type=int,
        default=25,  # NOTE(review): unused — required=True means it is always supplied
        help="max number of tokens in a chunk",
    )
    parser.add_argument(
        "--chunk-look-ahead",
        required=False,
        type=int,
        default=0,
        help="number of tokens in a chunk as look ahead",
    )
    parser.add_argument(
        "--chunk-cond-portion",
        required=True,
        type=float,
        default=25,  # NOTE(review): unused (required=True); infer() clamps out-of-range values to 0.5
        help="the portion at the tail of the prev chunk as condition",
    )
    parser.add_argument(
        "--max-ref-duration",
        required=False,
        type=float,
        default=4.5,
        help="the max duration of ref audio in seconds",
    )
    parser.add_argument(
        "--ref-audio-cut-from-head",
        default=False,
        action="store_true",
        help="cut ref audio from head, if not set, from tail by default",
    )

    args = parser.parse_args()

    start_time = time.perf_counter()

    run_infer_mp(args)

    end_time = time.perf_counter()
    # NOTE(review): "processig" typo is part of the emitted log string — left as-is.
    logging.info("processig time: %f sec\n" % (end_time - start_time))
    logging.info(f"Finished! output to : {args.output}")
semantic_detokenizer/ckpt/model.pt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:9f99c218589e92c399190175273fa179d3e0ce6661469e6c19cce1fd4cab6abc
|
3 |
+
size 6507564685
|
semantic_detokenizer/ckpt/model.pt.md5
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
7c64bfb2fefb99ac48e7d6bfd72732e3 model.pt
|
semantic_detokenizer/ckpt/model.yaml
ADDED
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
hydra:
|
2 |
+
run:
|
3 |
+
# _${datasets.name}/
|
4 |
+
dir: ckpts/${model.name}_${model.mel_spec.mel_spec_type}_${model.tokenizer}/${now:%Y-%m-%d}/${now:%H-%M-%S}
|
5 |
+
|
6 |
+
datasets:
|
7 |
+
name: LibriTTS_960_WenetSpeech4TTS_Premium_Standard_TTSDatawithMultiStyleEmotion_Datatang_Haitianruisheng_Qingshu_XiaoyiF143_Bani # dataset name
|
8 |
+
path: /home/ma-user/work/dehua/unit2speech/u2s_training_data/40ms_imedia_45wh_hubert_large_FSQ_8888_CTC_sampled_EN_CH_4000h_wulan_H20_mt_1280k_lr_3e-4_d2v_phase_nomask_20241018_8p_fromDaxin/LibriTTS_WenetSpeech4TTS_TTSDatawithMultiStyleEmotion_XiaoyiF143_Bani/train-960_Premium_Standard_datatang_haitianruisheng_qingshu_freetalk_style1_all-three
|
9 |
+
batch_size_per_gpu: 2000 # 38400 # 8 GPUs, 8 * 38400 = 307200
|
10 |
+
batch_size_type: frame # "frame" or "sample"
|
11 |
+
max_samples: 4 # 64 # max sequences per batch if use frame-wise batch_size. we set 32 for small models, 64 for base models
|
12 |
+
num_workers: 2
|
13 |
+
|
14 |
+
optim:
|
15 |
+
epochs: 1000 # 15
|
16 |
+
learning_rate: 7.5e-5
|
17 |
+
num_warmup_updates: 20000 # warmup steps
|
18 |
+
grad_accumulation_steps: 1 # note: updates = steps / grad_accumulation_steps
|
19 |
+
max_grad_norm: 1.0 # gradient clipping
|
20 |
+
bnb_optimizer: False # use bnb 8bit AdamW optimizer or not
|
21 |
+
|
22 |
+
model:
|
23 |
+
name: CADiT_Base_train_40ms_imedia_45wh_HuBERT_large_FSQ_8888_CTC_sampled_EN_CH_4000h_CTC_LibriTTS_960_WenetSpeech4TTS_Premium_Standard_TTSDatawithMultiStyleEmotion_Datatang_Haitianruisheng_Qingshu_XiaoyiF143_Bani_Shanghai_V100_5n8g_20250414 # model name
|
24 |
+
tokenizer: custom # pinyin; tokenizer type
|
25 |
+
# if tokenizer = 'custom', define the path to the tokenizer you want to use (should be vocab.txt)
|
26 |
+
tokenizer_path: /home/ma-user/work/dehua/unit2speech/u2s_training_data/40ms_imedia_45wh_hubert_large_FSQ_8888_CTC_sampled_EN_CH_4000h_wulan_H20_mt_1280k_lr_3e-4_d2v_phase_nomask_20241018_8p_fromDaxin/LibriTTS_WenetSpeech4TTS_TTSDatawithMultiStyleEmotion_XiaoyiF143_Bani/train-960_Premium_Standard_datatang_haitianruisheng_qingshu_freetalk_style1_all-three/vocab.txt
|
27 |
+
arch:
|
28 |
+
dim: 1024
|
29 |
+
depth: 22
|
30 |
+
heads: 16
|
31 |
+
ff_mult: 2
|
32 |
+
text_dim: 512
|
33 |
+
should_extend_text: True
|
34 |
+
conv_layers: 4
|
35 |
+
checkpoint_activations: False # recompute activations and save memory for extra compute
|
36 |
+
mel_spec:
|
37 |
+
target_sample_rate: 24000
|
38 |
+
n_mel_channels: 100
|
39 |
+
hop_length: 256
|
40 |
+
win_length: 1024
|
41 |
+
n_fft: 1024
|
42 |
+
mel_spec_type: vocos # 'vocos' or 'bigvgan'
|
43 |
+
vocoder:
|
44 |
+
is_local: True # use local offline ckpt or not
|
45 |
+
local_path: /home/ma-user/work/dehua/unit2speech/CA-F5-TTS/pretrained_vocoder/charactr/vocos-mel-24khz # local vocoder path
|
46 |
+
|
47 |
+
ckpts:
|
48 |
+
logger: tensorboard # wandb | tensorboard | None
|
49 |
+
save_per_updates: 50000 # save checkpoint per steps
|
50 |
+
last_per_steps: 2000 # 5000 # save last checkpoint per steps
|
51 |
+
save_dir: ckpts/${model.name}_${model.mel_spec.mel_spec_type}_${model.tokenizer} # _${datasets.name}
|
semantic_detokenizer/ckpt/vocab_4096.txt
ADDED
@@ -0,0 +1,4096 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
0
|
2 |
+
1
|
3 |
+
10
|
4 |
+
100
|
5 |
+
1000
|
6 |
+
1001
|
7 |
+
1002
|
8 |
+
1003
|
9 |
+
1004
|
10 |
+
1005
|
11 |
+
1006
|
12 |
+
1007
|
13 |
+
1008
|
14 |
+
1009
|
15 |
+
101
|
16 |
+
1010
|
17 |
+
1011
|
18 |
+
1012
|
19 |
+
1013
|
20 |
+
1014
|
21 |
+
1015
|
22 |
+
1016
|
23 |
+
1017
|
24 |
+
1018
|
25 |
+
1019
|
26 |
+
102
|
27 |
+
1020
|
28 |
+
1021
|
29 |
+
1022
|
30 |
+
1023
|
31 |
+
1024
|
32 |
+
1025
|
33 |
+
1026
|
34 |
+
1027
|
35 |
+
1028
|
36 |
+
1029
|
37 |
+
103
|
38 |
+
1030
|
39 |
+
1031
|
40 |
+
1032
|
41 |
+
1033
|
42 |
+
1034
|
43 |
+
1035
|
44 |
+
1036
|
45 |
+
1037
|
46 |
+
1038
|
47 |
+
1039
|
48 |
+
104
|
49 |
+
1040
|
50 |
+
1041
|
51 |
+
1042
|
52 |
+
1043
|
53 |
+
1044
|
54 |
+
1045
|
55 |
+
1046
|
56 |
+
1047
|
57 |
+
1048
|
58 |
+
1049
|
59 |
+
105
|
60 |
+
1050
|
61 |
+
1051
|
62 |
+
1052
|
63 |
+
1053
|
64 |
+
1054
|
65 |
+
1055
|
66 |
+
1056
|
67 |
+
1057
|
68 |
+
1058
|
69 |
+
1059
|
70 |
+
106
|
71 |
+
1060
|
72 |
+
1061
|
73 |
+
1062
|
74 |
+
1063
|
75 |
+
1064
|
76 |
+
1065
|
77 |
+
1066
|
78 |
+
1067
|
79 |
+
1068
|
80 |
+
1069
|
81 |
+
107
|
82 |
+
1070
|
83 |
+
1071
|
84 |
+
1072
|
85 |
+
1073
|
86 |
+
1074
|
87 |
+
1075
|
88 |
+
1076
|
89 |
+
1077
|
90 |
+
1078
|
91 |
+
1079
|
92 |
+
108
|
93 |
+
1080
|
94 |
+
1081
|
95 |
+
1082
|
96 |
+
1083
|
97 |
+
1084
|
98 |
+
1085
|
99 |
+
1086
|
100 |
+
1087
|
101 |
+
1088
|
102 |
+
1089
|
103 |
+
109
|
104 |
+
1090
|
105 |
+
1091
|
106 |
+
1092
|
107 |
+
1093
|
108 |
+
1094
|
109 |
+
1095
|
110 |
+
1096
|
111 |
+
1097
|
112 |
+
1098
|
113 |
+
1099
|
114 |
+
11
|
115 |
+
110
|
116 |
+
1100
|
117 |
+
1101
|
118 |
+
1102
|
119 |
+
1103
|
120 |
+
1104
|
121 |
+
1105
|
122 |
+
1106
|
123 |
+
1107
|
124 |
+
1108
|
125 |
+
1109
|
126 |
+
111
|
127 |
+
1110
|
128 |
+
1111
|
129 |
+
1112
|
130 |
+
1113
|
131 |
+
1114
|
132 |
+
1115
|
133 |
+
1116
|
134 |
+
1117
|
135 |
+
1118
|
136 |
+
1119
|
137 |
+
112
|
138 |
+
1120
|
139 |
+
1121
|
140 |
+
1122
|
141 |
+
1123
|
142 |
+
1124
|
143 |
+
1125
|
144 |
+
1126
|
145 |
+
1127
|
146 |
+
1128
|
147 |
+
1129
|
148 |
+
113
|
149 |
+
1130
|
150 |
+
1131
|
151 |
+
1132
|
152 |
+
1133
|
153 |
+
1134
|
154 |
+
1135
|
155 |
+
1136
|
156 |
+
1137
|
157 |
+
1138
|
158 |
+
1139
|
159 |
+
114
|
160 |
+
1140
|
161 |
+
1141
|
162 |
+
1142
|
163 |
+
1143
|
164 |
+
1144
|
165 |
+
1145
|
166 |
+
1146
|
167 |
+
1147
|
168 |
+
1148
|
169 |
+
1149
|
170 |
+
115
|
171 |
+
1150
|
172 |
+
1151
|
173 |
+
1152
|
174 |
+
1153
|
175 |
+
1154
|
176 |
+
1155
|
177 |
+
1156
|
178 |
+
1157
|
179 |
+
1158
|
180 |
+
1159
|
181 |
+
116
|
182 |
+
1160
|
183 |
+
1161
|
184 |
+
1162
|
185 |
+
1163
|
186 |
+
1164
|
187 |
+
1165
|
188 |
+
1166
|
189 |
+
1167
|
190 |
+
1168
|
191 |
+
1169
|
192 |
+
117
|
193 |
+
1170
|
194 |
+
1171
|
195 |
+
1172
|
196 |
+
1173
|
197 |
+
1174
|
198 |
+
1175
|
199 |
+
1176
|
200 |
+
1177
|
201 |
+
1178
|
202 |
+
1179
|
203 |
+
118
|
204 |
+
1180
|
205 |
+
1181
|
206 |
+
1182
|
207 |
+
1183
|
208 |
+
1184
|
209 |
+
1185
|
210 |
+
1186
|
211 |
+
1187
|
212 |
+
1188
|
213 |
+
1189
|
214 |
+
119
|
215 |
+
1190
|
216 |
+
1191
|
217 |
+
1192
|
218 |
+
1193
|
219 |
+
1194
|
220 |
+
1195
|
221 |
+
1196
|
222 |
+
1197
|
223 |
+
1198
|
224 |
+
1199
|
225 |
+
12
|
226 |
+
120
|
227 |
+
1200
|
228 |
+
1201
|
229 |
+
1202
|
230 |
+
1203
|
231 |
+
1204
|
232 |
+
1205
|
233 |
+
1206
|
234 |
+
1207
|
235 |
+
1208
|
236 |
+
1209
|
237 |
+
121
|
238 |
+
1210
|
239 |
+
1211
|
240 |
+
1212
|
241 |
+
1213
|
242 |
+
1214
|
243 |
+
1215
|
244 |
+
1216
|
245 |
+
1217
|
246 |
+
1218
|
247 |
+
1219
|
248 |
+
122
|
249 |
+
1220
|
250 |
+
1221
|
251 |
+
1222
|
252 |
+
1223
|
253 |
+
1224
|
254 |
+
1225
|
255 |
+
1226
|
256 |
+
1227
|
257 |
+
1228
|
258 |
+
1229
|
259 |
+
123
|
260 |
+
1230
|
261 |
+
1231
|
262 |
+
1232
|
263 |
+
1233
|
264 |
+
1234
|
265 |
+
1235
|
266 |
+
1236
|
267 |
+
1237
|
268 |
+
1238
|
269 |
+
1239
|
270 |
+
124
|
271 |
+
1240
|
272 |
+
1241
|
273 |
+
1242
|
274 |
+
1243
|
275 |
+
1244
|
276 |
+
1245
|
277 |
+
1246
|
278 |
+
1247
|
279 |
+
1248
|
280 |
+
1249
|
281 |
+
125
|
282 |
+
1250
|
283 |
+
1251
|
284 |
+
1252
|
285 |
+
1253
|
286 |
+
1254
|
287 |
+
1255
|
288 |
+
1256
|
289 |
+
1257
|
290 |
+
1258
|
291 |
+
1259
|
292 |
+
126
|
293 |
+
1260
|
294 |
+
1261
|
295 |
+
1262
|
296 |
+
1263
|
297 |
+
1264
|
298 |
+
1265
|
299 |
+
1266
|
300 |
+
1267
|
301 |
+
1268
|
302 |
+
1269
|
303 |
+
127
|
304 |
+
1270
|
305 |
+
1271
|
306 |
+
1272
|
307 |
+
1273
|
308 |
+
1274
|
309 |
+
1275
|
310 |
+
1276
|
311 |
+
1277
|
312 |
+
1278
|
313 |
+
1279
|
314 |
+
128
|
315 |
+
1280
|
316 |
+
1281
|
317 |
+
1282
|
318 |
+
1283
|
319 |
+
1284
|
320 |
+
1285
|
321 |
+
1286
|
322 |
+
1287
|
323 |
+
1288
|
324 |
+
1289
|
325 |
+
129
|
326 |
+
1290
|
327 |
+
1291
|
328 |
+
1292
|
329 |
+
1293
|
330 |
+
1294
|
331 |
+
1295
|
332 |
+
1296
|
333 |
+
1297
|
334 |
+
1298
|
335 |
+
1299
|
336 |
+
13
|
337 |
+
130
|
338 |
+
1300
|
339 |
+
1301
|
340 |
+
1302
|
341 |
+
1303
|
342 |
+
1304
|
343 |
+
1305
|
344 |
+
1306
|
345 |
+
1307
|
346 |
+
1308
|
347 |
+
1309
|
348 |
+
131
|
349 |
+
1310
|
350 |
+
1311
|
351 |
+
1312
|
352 |
+
1313
|
353 |
+
1314
|
354 |
+
1315
|
355 |
+
1316
|
356 |
+
1317
|
357 |
+
1318
|
358 |
+
1319
|
359 |
+
132
|
360 |
+
1320
|
361 |
+
1321
|
362 |
+
1322
|
363 |
+
1323
|
364 |
+
1324
|
365 |
+
1325
|
366 |
+
1326
|
367 |
+
1327
|
368 |
+
1328
|
369 |
+
1329
|
370 |
+
133
|
371 |
+
1330
|
372 |
+
1331
|
373 |
+
1332
|
374 |
+
1333
|
375 |
+
1334
|
376 |
+
1335
|
377 |
+
1336
|
378 |
+
1337
|
379 |
+
1338
|
380 |
+
1339
|
381 |
+
134
|
382 |
+
1340
|
383 |
+
1341
|
384 |
+
1342
|
385 |
+
1343
|
386 |
+
1344
|
387 |
+
1345
|
388 |
+
1346
|
389 |
+
1347
|
390 |
+
1348
|
391 |
+
1349
|
392 |
+
135
|
393 |
+
1350
|
394 |
+
1351
|
395 |
+
1352
|
396 |
+
1353
|
397 |
+
1354
|
398 |
+
1355
|
399 |
+
1356
|
400 |
+
1357
|
401 |
+
1358
|
402 |
+
1359
|
403 |
+
136
|
404 |
+
1360
|
405 |
+
1361
|
406 |
+
1362
|
407 |
+
1363
|
408 |
+
1364
|
409 |
+
1365
|
410 |
+
1366
|
411 |
+
1367
|
412 |
+
1368
|
413 |
+
1369
|
414 |
+
137
|
415 |
+
1370
|
416 |
+
1371
|
417 |
+
1372
|
418 |
+
1373
|
419 |
+
1374
|
420 |
+
1375
|
421 |
+
1376
|
422 |
+
1377
|
423 |
+
1378
|
424 |
+
1379
|
425 |
+
138
|
426 |
+
1380
|
427 |
+
1381
|
428 |
+
1382
|
429 |
+
1383
|
430 |
+
1384
|
431 |
+
1385
|
432 |
+
1386
|
433 |
+
1387
|
434 |
+
1388
|
435 |
+
1389
|
436 |
+
139
|
437 |
+
1390
|
438 |
+
1391
|
439 |
+
1392
|
440 |
+
1393
|
441 |
+
1394
|
442 |
+
1395
|
443 |
+
1396
|
444 |
+
1397
|
445 |
+
1398
|
446 |
+
1399
|
447 |
+
14
|
448 |
+
140
|
449 |
+
1400
|
450 |
+
1401
|
451 |
+
1402
|
452 |
+
1403
|
453 |
+
1404
|
454 |
+
1405
|
455 |
+
1406
|
456 |
+
1407
|
457 |
+
1408
|
458 |
+
1409
|
459 |
+
141
|
460 |
+
1410
|
461 |
+
1411
|
462 |
+
1412
|
463 |
+
1413
|
464 |
+
1414
|
465 |
+
1415
|
466 |
+
1416
|
467 |
+
1417
|
468 |
+
1418
|
469 |
+
1419
|
470 |
+
142
|
471 |
+
1420
|
472 |
+
1421
|
473 |
+
1422
|
474 |
+
1423
|
475 |
+
1424
|
476 |
+
1425
|
477 |
+
1426
|
478 |
+
1427
|
479 |
+
1428
|
480 |
+
1429
|
481 |
+
143
|
482 |
+
1430
|
483 |
+
1431
|
484 |
+
1432
|
485 |
+
1433
|
486 |
+
1434
|
487 |
+
1435
|
488 |
+
1436
|
489 |
+
1437
|
490 |
+
1438
|
491 |
+
1439
|
492 |
+
144
|
493 |
+
1440
|
494 |
+
1441
|
495 |
+
1442
|
496 |
+
1443
|
497 |
+
1444
|
498 |
+
1445
|
499 |
+
1446
|
500 |
+
1447
|
501 |
+
1448
|
502 |
+
1449
|
503 |
+
145
|
504 |
+
1450
|
505 |
+
1451
|
506 |
+
1452
|
507 |
+
1453
|
508 |
+
1454
|
509 |
+
1455
|
510 |
+
1456
|
511 |
+
1457
|
512 |
+
1458
|
513 |
+
1459
|
514 |
+
146
|
515 |
+
1460
|
516 |
+
1461
|
517 |
+
1462
|
518 |
+
1463
|
519 |
+
1464
|
520 |
+
1465
|
521 |
+
1466
|
522 |
+
1467
|
523 |
+
1468
|
524 |
+
1469
|
525 |
+
147
|
526 |
+
1470
|
527 |
+
1471
|
528 |
+
1472
|
529 |
+
1473
|
530 |
+
1474
|
531 |
+
1475
|
532 |
+
1476
|
533 |
+
1477
|
534 |
+
1478
|
535 |
+
1479
|
536 |
+
148
|
537 |
+
1480
|
538 |
+
1481
|
539 |
+
1482
|
540 |
+
1483
|
541 |
+
1484
|
542 |
+
1485
|
543 |
+
1486
|
544 |
+
1487
|
545 |
+
1488
|
546 |
+
1489
|
547 |
+
149
|
548 |
+
1490
|
549 |
+
1491
|
550 |
+
1492
|
551 |
+
1493
|
552 |
+
1494
|
553 |
+
1495
|
554 |
+
1496
|
555 |
+
1497
|
556 |
+
1498
|
557 |
+
1499
|
558 |
+
15
|
559 |
+
150
|
560 |
+
1500
|
561 |
+
1501
|
562 |
+
1502
|
563 |
+
1503
|
564 |
+
1504
|
565 |
+
1505
|
566 |
+
1506
|
567 |
+
1507
|
568 |
+
1508
|
569 |
+
1509
|
570 |
+
151
|
571 |
+
1510
|
572 |
+
1511
|
573 |
+
1512
|
574 |
+
1513
|
575 |
+
1514
|
576 |
+
1515
|
577 |
+
1516
|
578 |
+
1517
|
579 |
+
1518
|
580 |
+
1519
|
581 |
+
152
|
582 |
+
1520
|
583 |
+
1521
|
584 |
+
1522
|
585 |
+
1523
|
586 |
+
1524
|
587 |
+
1525
|
588 |
+
1526
|
589 |
+
1527
|
590 |
+
1528
|
591 |
+
1529
|
592 |
+
153
|
593 |
+
1530
|
594 |
+
1531
|
595 |
+
1532
|
596 |
+
1533
|
597 |
+
1534
|
598 |
+
1535
|
599 |
+
1536
|
600 |
+
1537
|
601 |
+
1538
|
602 |
+
1539
|
603 |
+
154
|
604 |
+
1540
|
605 |
+
1541
|
606 |
+
1542
|
607 |
+
1543
|
608 |
+
1544
|
609 |
+
1545
|
610 |
+
1546
|
611 |
+
1547
|
612 |
+
1548
|
613 |
+
1549
|
614 |
+
155
|
615 |
+
1550
|
616 |
+
1551
|
617 |
+
1552
|
618 |
+
1553
|
619 |
+
1554
|
620 |
+
1555
|
621 |
+
1556
|
622 |
+
1557
|
623 |
+
1558
|
624 |
+
1559
|
625 |
+
156
|
626 |
+
1560
|
627 |
+
1561
|
628 |
+
1562
|
629 |
+
1563
|
630 |
+
1564
|
631 |
+
1565
|
632 |
+
1566
|
633 |
+
1567
|
634 |
+
1568
|
635 |
+
1569
|
636 |
+
157
|
637 |
+
1570
|
638 |
+
1571
|
639 |
+
1572
|
640 |
+
1573
|
641 |
+
1574
|
642 |
+
1575
|
643 |
+
1576
|
644 |
+
1577
|
645 |
+
1578
|
646 |
+
1579
|
647 |
+
158
|
648 |
+
1580
|
649 |
+
1581
|
650 |
+
1582
|
651 |
+
1583
|
652 |
+
1584
|
653 |
+
1585
|
654 |
+
1586
|
655 |
+
1587
|
656 |
+
1588
|
657 |
+
1589
|
658 |
+
159
|
659 |
+
1590
|
660 |
+
1591
|
661 |
+
1592
|
662 |
+
1593
|
663 |
+
1594
|
664 |
+
1595
|
665 |
+
1596
|
666 |
+
1597
|
667 |
+
1598
|
668 |
+
1599
|
669 |
+
16
|
670 |
+
160
|
671 |
+
1600
|
672 |
+
1601
|
673 |
+
1602
|
674 |
+
1603
|
675 |
+
1604
|
676 |
+
1605
|
677 |
+
1606
|
678 |
+
1607
|
679 |
+
1608
|
680 |
+
1609
|
681 |
+
161
|
682 |
+
1610
|
683 |
+
1611
|
684 |
+
1612
|
685 |
+
1613
|
686 |
+
1614
|
687 |
+
1615
|
688 |
+
1616
|
689 |
+
1617
|
690 |
+
1618
|
691 |
+
1619
|
692 |
+
162
|
693 |
+
1620
|
694 |
+
1621
|
695 |
+
1622
|
696 |
+
1623
|
697 |
+
1624
|
698 |
+
1625
|
699 |
+
1626
|
700 |
+
1627
|
701 |
+
1628
|
702 |
+
1629
|
703 |
+
163
|
704 |
+
1630
|
705 |
+
1631
|
706 |
+
1632
|
707 |
+
1633
|
708 |
+
1634
|
709 |
+
1635
|
710 |
+
1636
|
711 |
+
1637
|
712 |
+
1638
|
713 |
+
1639
|
714 |
+
164
|
715 |
+
1640
|
716 |
+
1641
|
717 |
+
1642
|
718 |
+
1643
|
719 |
+
1644
|
720 |
+
1645
|
721 |
+
1646
|
722 |
+
1647
|
723 |
+
1648
|
724 |
+
1649
|
725 |
+
165
|
726 |
+
1650
|
727 |
+
1651
|
728 |
+
1652
|
729 |
+
1653
|
730 |
+
1654
|
731 |
+
1655
|
732 |
+
1656
|
733 |
+
1657
|
734 |
+
1658
|
735 |
+
1659
|
736 |
+
166
|
737 |
+
1660
|
738 |
+
1661
|
739 |
+
1662
|
740 |
+
1663
|
741 |
+
1664
|
742 |
+
1665
|
743 |
+
1666
|
744 |
+
1667
|
745 |
+
1668
|
746 |
+
1669
|
747 |
+
167
|
748 |
+
1670
|
749 |
+
1671
|
750 |
+
1672
|
751 |
+
1673
|
752 |
+
1674
|
753 |
+
1675
|
754 |
+
1676
|
755 |
+
1677
|
756 |
+
1678
|
757 |
+
1679
|
758 |
+
168
|
759 |
+
1680
|
760 |
+
1681
|
761 |
+
1682
|
762 |
+
1683
|
763 |
+
1684
|
764 |
+
1685
|
765 |
+
1686
|
766 |
+
1687
|
767 |
+
1688
|
768 |
+
1689
|
769 |
+
169
|
770 |
+
1690
|
771 |
+
1691
|
772 |
+
1692
|
773 |
+
1693
|
774 |
+
1694
|
775 |
+
1695
|
776 |
+
1696
|
777 |
+
1697
|
778 |
+
1698
|
779 |
+
1699
|
780 |
+
17
|
781 |
+
170
|
782 |
+
1700
|
783 |
+
1701
|
784 |
+
1702
|
785 |
+
1703
|
786 |
+
1704
|
787 |
+
1705
|
788 |
+
1706
|
789 |
+
1707
|
790 |
+
1708
|
791 |
+
1709
|
792 |
+
171
|
793 |
+
1710
|
794 |
+
1711
|
795 |
+
1712
|
796 |
+
1713
|
797 |
+
1714
|
798 |
+
1715
|
799 |
+
1716
|
800 |
+
1717
|
801 |
+
1718
|
802 |
+
1719
|
803 |
+
172
|
804 |
+
1720
|
805 |
+
1721
|
806 |
+
1722
|
807 |
+
1723
|
808 |
+
1724
|
809 |
+
1725
|
810 |
+
1726
|
811 |
+
1727
|
812 |
+
1728
|
813 |
+
1729
|
814 |
+
173
|
815 |
+
1730
|
816 |
+
1731
|
817 |
+
1732
|
818 |
+
1733
|
819 |
+
1734
|
820 |
+
1735
|
821 |
+
1736
|
822 |
+
1737
|
823 |
+
1738
|
824 |
+
1739
|
825 |
+
174
|
826 |
+
1740
|
827 |
+
1741
|
828 |
+
1742
|
829 |
+
1743
|
830 |
+
1744
|
831 |
+
1745
|
832 |
+
1746
|
833 |
+
1747
|
834 |
+
1748
|
835 |
+
1749
|
836 |
+
175
|
837 |
+
1750
|
838 |
+
1751
|
839 |
+
1752
|
840 |
+
1753
|
841 |
+
1754
|
842 |
+
1755
|
843 |
+
1756
|
844 |
+
1757
|
845 |
+
1758
|
846 |
+
1759
|
847 |
+
176
|
848 |
+
1760
|
849 |
+
1761
|
850 |
+
1762
|
851 |
+
1763
|
852 |
+
1764
|
853 |
+
1765
|
854 |
+
1766
|
855 |
+
1767
|
856 |
+
1768
|
857 |
+
1769
|
858 |
+
177
|
859 |
+
1770
|
860 |
+
1771
|
861 |
+
1772
|
862 |
+
1773
|
863 |
+
1774
|
864 |
+
1775
|
865 |
+
1776
|
866 |
+
1777
|
867 |
+
1778
|
868 |
+
1779
|
869 |
+
178
|
870 |
+
1780
|
871 |
+
1781
|
872 |
+
1782
|
873 |
+
1783
|
874 |
+
1784
|
875 |
+
1785
|
876 |
+
1786
|
877 |
+
1787
|
878 |
+
1788
|
879 |
+
1789
|
880 |
+
179
|
881 |
+
1790
|
882 |
+
1791
|
883 |
+
1792
|
884 |
+
1793
|
885 |
+
1794
|
886 |
+
1795
|
887 |
+
1796
|
888 |
+
1797
|
889 |
+
1798
|
890 |
+
1799
|
891 |
+
18
|
892 |
+
180
|
893 |
+
1800
|
894 |
+
1801
|
895 |
+
1802
|
896 |
+
1803
|
897 |
+
1804
|
898 |
+
1805
|
899 |
+
1806
|
900 |
+
1807
|
901 |
+
1808
|
902 |
+
1809
|
903 |
+
181
|
904 |
+
1810
|
905 |
+
1811
|
906 |
+
1812
|
907 |
+
1813
|
908 |
+
1814
|
909 |
+
1815
|
910 |
+
1816
|
911 |
+
1817
|
912 |
+
1818
|
913 |
+
1819
|
914 |
+
182
|
915 |
+
1820
|
916 |
+
1821
|
917 |
+
1822
|
918 |
+
1823
|
919 |
+
1824
|
920 |
+
1825
|
921 |
+
1826
|
922 |
+
1827
|
923 |
+
1828
|
924 |
+
1829
|
925 |
+
183
|
926 |
+
1830
|
927 |
+
1831
|
928 |
+
1832
|
929 |
+
1833
|
930 |
+
1834
|
931 |
+
1835
|
932 |
+
1836
|
933 |
+
1837
|
934 |
+
1838
|
935 |
+
1839
|
936 |
+
184
|
937 |
+
1840
|
938 |
+
1841
|
939 |
+
1842
|
940 |
+
1843
|
941 |
+
1844
|
942 |
+
1845
|
943 |
+
1846
|
944 |
+
1847
|
945 |
+
1848
|
946 |
+
1849
|
947 |
+
185
|
948 |
+
1850
|
949 |
+
1851
|
950 |
+
1852
|
951 |
+
1853
|
952 |
+
1854
|
953 |
+
1855
|
954 |
+
1856
|
955 |
+
1857
|
956 |
+
1858
|
957 |
+
1859
|
958 |
+
186
|
959 |
+
1860
|
960 |
+
1861
|
961 |
+
1862
|
962 |
+
1863
|
963 |
+
1864
|
964 |
+
1865
|
965 |
+
1866
|
966 |
+
1867
|
967 |
+
1868
|
968 |
+
1869
|
969 |
+
187
|
970 |
+
1870
|
971 |
+
1871
|
972 |
+
1872
|
973 |
+
1873
|
974 |
+
1874
|
975 |
+
1875
|
976 |
+
1876
|
977 |
+
1877
|
978 |
+
1878
|
979 |
+
1879
|
980 |
+
188
|
981 |
+
1880
|
982 |
+
1881
|
983 |
+
1882
|
984 |
+
1883
|
985 |
+
1884
|
986 |
+
1885
|
987 |
+
1886
|
988 |
+
1887
|
989 |
+
1888
|
990 |
+
1889
|
991 |
+
189
|
992 |
+
1890
|
993 |
+
1891
|
994 |
+
1892
|
995 |
+
1893
|
996 |
+
1894
|
997 |
+
1895
|
998 |
+
1896
|
999 |
+
1897
|
1000 |
+
1898
|
1001 |
+
1899
|
1002 |
+
19
|
1003 |
+
190
|
1004 |
+
1900
|
1005 |
+
1901
|
1006 |
+
1902
|
1007 |
+
1903
|
1008 |
+
1904
|
1009 |
+
1905
|
1010 |
+
1906
|
1011 |
+
1907
|
1012 |
+
1908
|
1013 |
+
1909
|
1014 |
+
191
|
1015 |
+
1910
|
1016 |
+
1911
|
1017 |
+
1912
|
1018 |
+
1913
|
1019 |
+
1914
|
1020 |
+
1915
|
1021 |
+
1916
|
1022 |
+
1917
|
1023 |
+
1918
|
1024 |
+
1919
|
1025 |
+
192
|
1026 |
+
1920
|
1027 |
+
1921
|
1028 |
+
1922
|
1029 |
+
1923
|
1030 |
+
1924
|
1031 |
+
1925
|
1032 |
+
1926
|
1033 |
+
1927
|
1034 |
+
1928
|
1035 |
+
1929
|
1036 |
+
193
|
1037 |
+
1930
|
1038 |
+
1931
|
1039 |
+
1932
|
1040 |
+
1933
|
1041 |
+
1934
|
1042 |
+
1935
|
1043 |
+
1936
|
1044 |
+
1937
|
1045 |
+
1938
|
1046 |
+
1939
|
1047 |
+
194
|
1048 |
+
1940
|
1049 |
+
1941
|
1050 |
+
1942
|
1051 |
+
1943
|
1052 |
+
1944
|
1053 |
+
1945
|
1054 |
+
1946
|
1055 |
+
1947
|
1056 |
+
1948
|
1057 |
+
1949
|
1058 |
+
195
|
1059 |
+
1950
|
1060 |
+
1951
|
1061 |
+
1952
|
1062 |
+
1953
|
1063 |
+
1954
|
1064 |
+
1955
|
1065 |
+
1956
|
1066 |
+
1957
|
1067 |
+
1958
|
1068 |
+
1959
|
1069 |
+
196
|
1070 |
+
1960
|
1071 |
+
1961
|
1072 |
+
1962
|
1073 |
+
1963
|
1074 |
+
1964
|
1075 |
+
1965
|
1076 |
+
1966
|
1077 |
+
1967
|
1078 |
+
1968
|
1079 |
+
1969
|
1080 |
+
197
|
1081 |
+
1970
|
1082 |
+
1971
|
1083 |
+
1972
|
1084 |
+
1973
|
1085 |
+
1974
|
1086 |
+
1975
|
1087 |
+
1976
|
1088 |
+
1977
|
1089 |
+
1978
|
1090 |
+
1979
|
1091 |
+
198
|
1092 |
+
1980
|
1093 |
+
1981
|
1094 |
+
1982
|
1095 |
+
1983
|
1096 |
+
1984
|
1097 |
+
1985
|
1098 |
+
1986
|
1099 |
+
1987
|
1100 |
+
1988
|
1101 |
+
1989
|
1102 |
+
199
|
1103 |
+
1990
|
1104 |
+
1991
|
1105 |
+
1992
|
1106 |
+
1993
|
1107 |
+
1994
|
1108 |
+
1995
|
1109 |
+
1996
|
1110 |
+
1997
|
1111 |
+
1998
|
1112 |
+
1999
|
1113 |
+
2
|
1114 |
+
20
|
1115 |
+
200
|
1116 |
+
2000
|
1117 |
+
2001
|
1118 |
+
2002
|
1119 |
+
2003
|
1120 |
+
2004
|
1121 |
+
2005
|
1122 |
+
2006
|
1123 |
+
2007
|
1124 |
+
2008
|
1125 |
+
2009
|
1126 |
+
201
|
1127 |
+
2010
|
1128 |
+
2011
|
1129 |
+
2012
|
1130 |
+
2013
|
1131 |
+
2014
|
1132 |
+
2015
|
1133 |
+
2016
|
1134 |
+
2017
|
1135 |
+
2018
|
1136 |
+
2019
|
1137 |
+
202
|
1138 |
+
2020
|
1139 |
+
2021
|
1140 |
+
2022
|
1141 |
+
2023
|
1142 |
+
2024
|
1143 |
+
2025
|
1144 |
+
2026
|
1145 |
+
2027
|
1146 |
+
2028
|
1147 |
+
2029
|
1148 |
+
203
|
1149 |
+
2030
|
1150 |
+
2031
|
1151 |
+
2032
|
1152 |
+
2033
|
1153 |
+
2034
|
1154 |
+
2035
|
1155 |
+
2036
|
1156 |
+
2037
|
1157 |
+
2038
|
1158 |
+
2039
|
1159 |
+
204
|
1160 |
+
2040
|
1161 |
+
2041
|
1162 |
+
2042
|
1163 |
+
2043
|
1164 |
+
2044
|
1165 |
+
2045
|
1166 |
+
2046
|
1167 |
+
2047
|
1168 |
+
2048
|
1169 |
+
2049
|
1170 |
+
205
|
1171 |
+
2050
|
1172 |
+
2051
|
1173 |
+
2052
|
1174 |
+
2053
|
1175 |
+
2054
|
1176 |
+
2055
|
1177 |
+
2056
|
1178 |
+
2057
|
1179 |
+
2058
|
1180 |
+
2059
|
1181 |
+
206
|
1182 |
+
2060
|
1183 |
+
2061
|
1184 |
+
2062
|
1185 |
+
2063
|
1186 |
+
2064
|
1187 |
+
2065
|
1188 |
+
2066
|
1189 |
+
2067
|
1190 |
+
2068
|
1191 |
+
2069
|
1192 |
+
207
|
1193 |
+
2070
|
1194 |
+
2071
|
1195 |
+
2072
|
1196 |
+
2073
|
1197 |
+
2074
|
1198 |
+
2075
|
1199 |
+
2076
|
1200 |
+
2077
|
1201 |
+
2078
|
1202 |
+
2079
|
1203 |
+
208
|
1204 |
+
2080
|
1205 |
+
2081
|
1206 |
+
2082
|
1207 |
+
2083
|
1208 |
+
2084
|
1209 |
+
2085
|
1210 |
+
2086
|
1211 |
+
2087
|
1212 |
+
2088
|
1213 |
+
2089
|
1214 |
+
209
|
1215 |
+
2090
|
1216 |
+
2091
|
1217 |
+
2092
|
1218 |
+
2093
|
1219 |
+
2094
|
1220 |
+
2095
|
1221 |
+
2096
|
1222 |
+
2097
|
1223 |
+
2098
|
1224 |
+
2099
|
1225 |
+
21
|
1226 |
+
210
|
1227 |
+
2100
|
1228 |
+
2101
|
1229 |
+
2102
|
1230 |
+
2103
|
1231 |
+
2104
|
1232 |
+
2105
|
1233 |
+
2106
|
1234 |
+
2107
|
1235 |
+
2108
|
1236 |
+
2109
|
1237 |
+
211
|
1238 |
+
2110
|
1239 |
+
2111
|
1240 |
+
2112
|
1241 |
+
2113
|
1242 |
+
2114
|
1243 |
+
2115
|
1244 |
+
2116
|
1245 |
+
2117
|
1246 |
+
2118
|
1247 |
+
2119
|
1248 |
+
212
|
1249 |
+
2120
|
1250 |
+
2121
|
1251 |
+
2122
|
1252 |
+
2123
|
1253 |
+
2124
|
1254 |
+
2125
|
1255 |
+
2126
|
1256 |
+
2127
|
1257 |
+
2128
|
1258 |
+
2129
|
1259 |
+
213
|
1260 |
+
2130
|
1261 |
+
2131
|
1262 |
+
2132
|
1263 |
+
2133
|
1264 |
+
2134
|
1265 |
+
2135
|
1266 |
+
2136
|
1267 |
+
2137
|
1268 |
+
2138
|
1269 |
+
2139
|
1270 |
+
214
|
1271 |
+
2140
|
1272 |
+
2141
|
1273 |
+
2142
|
1274 |
+
2143
|
1275 |
+
2144
|
1276 |
+
2145
|
1277 |
+
2146
|
1278 |
+
2147
|
1279 |
+
2148
|
1280 |
+
2149
|
1281 |
+
215
|
1282 |
+
2150
|
1283 |
+
2151
|
1284 |
+
2152
|
1285 |
+
2153
|
1286 |
+
2154
|
1287 |
+
2155
|
1288 |
+
2156
|
1289 |
+
2157
|
1290 |
+
2158
|
1291 |
+
2159
|
1292 |
+
216
|
1293 |
+
2160
|
1294 |
+
2161
|
1295 |
+
2162
|
1296 |
+
2163
|
1297 |
+
2164
|
1298 |
+
2165
|
1299 |
+
2166
|
1300 |
+
2167
|
1301 |
+
2168
|
1302 |
+
2169
|
1303 |
+
217
|
1304 |
+
2170
|
1305 |
+
2171
|
1306 |
+
2172
|
1307 |
+
2173
|
1308 |
+
2174
|
1309 |
+
2175
|
1310 |
+
2176
|
1311 |
+
2177
|
1312 |
+
2178
|
1313 |
+
2179
|
1314 |
+
218
|
1315 |
+
2180
|
1316 |
+
2181
|
1317 |
+
2182
|
1318 |
+
2183
|
1319 |
+
2184
|
1320 |
+
2185
|
1321 |
+
2186
|
1322 |
+
2187
|
1323 |
+
2188
|
1324 |
+
2189
|
1325 |
+
219
|
1326 |
+
2190
|
1327 |
+
2191
|
1328 |
+
2192
|
1329 |
+
2193
|
1330 |
+
2194
|
1331 |
+
2195
|
1332 |
+
2196
|
1333 |
+
2197
|
1334 |
+
2198
|
1335 |
+
2199
|
1336 |
+
22
|
1337 |
+
220
|
1338 |
+
2200
|
1339 |
+
2201
|
1340 |
+
2202
|
1341 |
+
2203
|
1342 |
+
2204
|
1343 |
+
2205
|
1344 |
+
2206
|
1345 |
+
2207
|
1346 |
+
2208
|
1347 |
+
2209
|
1348 |
+
221
|
1349 |
+
2210
|
1350 |
+
2211
|
1351 |
+
2212
|
1352 |
+
2213
|
1353 |
+
2214
|
1354 |
+
2215
|
1355 |
+
2216
|
1356 |
+
2217
|
1357 |
+
2218
|
1358 |
+
2219
|
1359 |
+
222
|
1360 |
+
2220
|
1361 |
+
2221
|
1362 |
+
2222
|
1363 |
+
2223
|
1364 |
+
2224
|
1365 |
+
2225
|
1366 |
+
2226
|
1367 |
+
2227
|
1368 |
+
2228
|
1369 |
+
2229
|
1370 |
+
223
|
1371 |
+
2230
|
1372 |
+
2231
|
1373 |
+
2232
|
1374 |
+
2233
|
1375 |
+
2234
|
1376 |
+
2235
|
1377 |
+
2236
|
1378 |
+
2237
|
1379 |
+
2238
|
1380 |
+
2239
|
1381 |
+
224
|
1382 |
+
2240
|
1383 |
+
2241
|
1384 |
+
2242
|
1385 |
+
2243
|
1386 |
+
2244
|
1387 |
+
2245
|
1388 |
+
2246
|
1389 |
+
2247
|
1390 |
+
2248
|
1391 |
+
2249
|
1392 |
+
225
|
1393 |
+
2250
|
1394 |
+
2251
|
1395 |
+
2252
|
1396 |
+
2253
|
1397 |
+
2254
|
1398 |
+
2255
|
1399 |
+
2256
|
1400 |
+
2257
|
1401 |
+
2258
|
1402 |
+
2259
|
1403 |
+
226
|
1404 |
+
2260
|
1405 |
+
2261
|
1406 |
+
2262
|
1407 |
+
2263
|
1408 |
+
2264
|
1409 |
+
2265
|
1410 |
+
2266
|
1411 |
+
2267
|
1412 |
+
2268
|
1413 |
+
2269
|
1414 |
+
227
|
1415 |
+
2270
|
1416 |
+
2271
|
1417 |
+
2272
|
1418 |
+
2273
|
1419 |
+
2274
|
1420 |
+
2275
|
1421 |
+
2276
|
1422 |
+
2277
|
1423 |
+
2278
|
1424 |
+
2279
|
1425 |
+
228
|
1426 |
+
2280
|
1427 |
+
2281
|
1428 |
+
2282
|
1429 |
+
2283
|
1430 |
+
2284
|
1431 |
+
2285
|
1432 |
+
2286
|
1433 |
+
2287
|
1434 |
+
2288
|
1435 |
+
2289
|
1436 |
+
229
|
1437 |
+
2290
|
1438 |
+
2291
|
1439 |
+
2292
|
1440 |
+
2293
|
1441 |
+
2294
|
1442 |
+
2295
|
1443 |
+
2296
|
1444 |
+
2297
|
1445 |
+
2298
|
1446 |
+
2299
|
1447 |
+
23
|
1448 |
+
230
|
1449 |
+
2300
|
1450 |
+
2301
|
1451 |
+
2302
|
1452 |
+
2303
|
1453 |
+
2304
|
1454 |
+
2305
|
1455 |
+
2306
|
1456 |
+
2307
|
1457 |
+
2308
|
1458 |
+
2309
|
1459 |
+
231
|
1460 |
+
2310
|
1461 |
+
2311
|
1462 |
+
2312
|
1463 |
+
2313
|
1464 |
+
2314
|
1465 |
+
2315
|
1466 |
+
2316
|
1467 |
+
2317
|
1468 |
+
2318
|
1469 |
+
2319
|
1470 |
+
232
|
1471 |
+
2320
|
1472 |
+
2321
|
1473 |
+
2322
|
1474 |
+
2323
|
1475 |
+
2324
|
1476 |
+
2325
|
1477 |
+
2326
|
1478 |
+
2327
|
1479 |
+
2328
|
1480 |
+
2329
|
1481 |
+
233
|
1482 |
+
2330
|
1483 |
+
2331
|
1484 |
+
2332
|
1485 |
+
2333
|
1486 |
+
2334
|
1487 |
+
2335
|
1488 |
+
2336
|
1489 |
+
2337
|
1490 |
+
2338
|
1491 |
+
2339
|
1492 |
+
234
|
1493 |
+
2340
|
1494 |
+
2341
|
1495 |
+
2342
|
1496 |
+
2343
|
1497 |
+
2344
|
1498 |
+
2345
|
1499 |
+
2346
|
1500 |
+
2347
|
1501 |
+
2348
|
1502 |
+
2349
|
1503 |
+
235
|
1504 |
+
2350
|
1505 |
+
2351
|
1506 |
+
2352
|
1507 |
+
2353
|
1508 |
+
2354
|
1509 |
+
2355
|
1510 |
+
2356
|
1511 |
+
2357
|
1512 |
+
2358
|
1513 |
+
2359
|
1514 |
+
236
|
1515 |
+
2360
|
1516 |
+
2361
|
1517 |
+
2362
|
1518 |
+
2363
|
1519 |
+
2364
|
1520 |
+
2365
|
1521 |
+
2366
|
1522 |
+
2367
|
1523 |
+
2368
|
1524 |
+
2369
|
1525 |
+
237
|
1526 |
+
2370
|
1527 |
+
2371
|
1528 |
+
2372
|
1529 |
+
2373
|
1530 |
+
2374
|
1531 |
+
2375
|
1532 |
+
2376
|
1533 |
+
2377
|
1534 |
+
2378
|
1535 |
+
2379
|
1536 |
+
238
|
1537 |
+
2380
|
1538 |
+
2381
|
1539 |
+
2382
|
1540 |
+
2383
|
1541 |
+
2384
|
1542 |
+
2385
|
1543 |
+
2386
|
1544 |
+
2387
|
1545 |
+
2388
|
1546 |
+
2389
|
1547 |
+
239
|
1548 |
+
2390
|
1549 |
+
2391
|
1550 |
+
2392
|
1551 |
+
2393
|
1552 |
+
2394
|
1553 |
+
2395
|
1554 |
+
2396
|
1555 |
+
2397
|
1556 |
+
2398
|
1557 |
+
2399
|
1558 |
+
24
|
1559 |
+
240
|
1560 |
+
2400
|
1561 |
+
2401
|
1562 |
+
2402
|
1563 |
+
2403
|
1564 |
+
2404
|
1565 |
+
2405
|
1566 |
+
2406
|
1567 |
+
2407
|
1568 |
+
2408
|
1569 |
+
2409
|
1570 |
+
241
|
1571 |
+
2410
|
1572 |
+
2411
|
1573 |
+
2412
|
1574 |
+
2413
|
1575 |
+
2414
|
1576 |
+
2415
|
1577 |
+
2416
|
1578 |
+
2417
|
1579 |
+
2418
|
1580 |
+
2419
|
1581 |
+
242
|
1582 |
+
2420
|
1583 |
+
2421
|
1584 |
+
2422
|
1585 |
+
2423
|
1586 |
+
2424
|
1587 |
+
2425
|
1588 |
+
2426
|
1589 |
+
2427
|
1590 |
+
2428
|
1591 |
+
2429
|
1592 |
+
243
|
1593 |
+
2430
|
1594 |
+
2431
|
1595 |
+
2432
|
1596 |
+
2433
|
1597 |
+
2434
|
1598 |
+
2435
|
1599 |
+
2436
|
1600 |
+
2437
|
1601 |
+
2438
|
1602 |
+
2439
|
1603 |
+
244
|
1604 |
+
2440
|
1605 |
+
2441
|
1606 |
+
2442
|
1607 |
+
2443
|
1608 |
+
2444
|
1609 |
+
2445
|
1610 |
+
2446
|
1611 |
+
2447
|
1612 |
+
2448
|
1613 |
+
2449
|
1614 |
+
245
|
1615 |
+
2450
|
1616 |
+
2451
|
1617 |
+
2452
|
1618 |
+
2453
|
1619 |
+
2454
|
1620 |
+
2455
|
1621 |
+
2456
|
1622 |
+
2457
|
1623 |
+
2458
|
1624 |
+
2459
|
1625 |
+
246
|
1626 |
+
2460
|
1627 |
+
2461
|
1628 |
+
2462
|
1629 |
+
2463
|
1630 |
+
2464
|
1631 |
+
2465
|
1632 |
+
2466
|
1633 |
+
2467
|
1634 |
+
2468
|
1635 |
+
2469
|
1636 |
+
247
|
1637 |
+
2470
|
1638 |
+
2471
|
1639 |
+
2472
|
1640 |
+
2473
|
1641 |
+
2474
|
1642 |
+
2475
|
1643 |
+
2476
|
1644 |
+
2477
|
1645 |
+
2478
|
1646 |
+
2479
|
1647 |
+
248
|
1648 |
+
2480
|
1649 |
+
2481
|
1650 |
+
2482
|
1651 |
+
2483
|
1652 |
+
2484
|
1653 |
+
2485
|
1654 |
+
2486
|
1655 |
+
2487
|
1656 |
+
2488
|
1657 |
+
2489
|
1658 |
+
249
|
1659 |
+
2490
|
1660 |
+
2491
|
1661 |
+
2492
|
1662 |
+
2493
|
1663 |
+
2494
|
1664 |
+
2495
|
1665 |
+
2496
|
1666 |
+
2497
|
1667 |
+
2498
|
1668 |
+
2499
|
1669 |
+
25
|
1670 |
+
250
|
1671 |
+
2500
|
1672 |
+
2501
|
1673 |
+
2502
|
1674 |
+
2503
|
1675 |
+
2504
|
1676 |
+
2505
|
1677 |
+
2506
|
1678 |
+
2507
|
1679 |
+
2508
|
1680 |
+
2509
|
1681 |
+
251
|
1682 |
+
2510
|
1683 |
+
2511
|
1684 |
+
2512
|
1685 |
+
2513
|
1686 |
+
2514
|
1687 |
+
2515
|
1688 |
+
2516
|
1689 |
+
2517
|
1690 |
+
2518
|
1691 |
+
2519
|
1692 |
+
252
|
1693 |
+
2520
|
1694 |
+
2521
|
1695 |
+
2522
|
1696 |
+
2523
|
1697 |
+
2524
|
1698 |
+
2525
|
1699 |
+
2526
|
1700 |
+
2527
|
1701 |
+
2528
|
1702 |
+
2529
|
1703 |
+
253
|
1704 |
+
2530
|
1705 |
+
2531
|
1706 |
+
2532
|
1707 |
+
2533
|
1708 |
+
2534
|
1709 |
+
2535
|
1710 |
+
2536
|
1711 |
+
2537
|
1712 |
+
2538
|
1713 |
+
2539
|
1714 |
+
254
|
1715 |
+
2540
|
1716 |
+
2541
|
1717 |
+
2542
|
1718 |
+
2543
|
1719 |
+
2544
|
1720 |
+
2545
|
1721 |
+
2546
|
1722 |
+
2547
|
1723 |
+
2548
|
1724 |
+
2549
|
1725 |
+
255
|
1726 |
+
2550
|
1727 |
+
2551
|
1728 |
+
2552
|
1729 |
+
2553
|
1730 |
+
2554
|
1731 |
+
2555
|
1732 |
+
2556
|
1733 |
+
2557
|
1734 |
+
2558
|
1735 |
+
2559
|
1736 |
+
256
|
1737 |
+
2560
|
1738 |
+
2561
|
1739 |
+
2562
|
1740 |
+
2563
|
1741 |
+
2564
|
1742 |
+
2565
|
1743 |
+
2566
|
1744 |
+
2567
|
1745 |
+
2568
|
1746 |
+
2569
|
1747 |
+
257
|
1748 |
+
2570
|
1749 |
+
2571
|
1750 |
+
2572
|
1751 |
+
2573
|
1752 |
+
2574
|
1753 |
+
2575
|
1754 |
+
2576
|
1755 |
+
2577
|
1756 |
+
2578
|
1757 |
+
2579
|
1758 |
+
258
|
1759 |
+
2580
|
1760 |
+
2581
|
1761 |
+
2582
|
1762 |
+
2583
|
1763 |
+
2584
|
1764 |
+
2585
|
1765 |
+
2586
|
1766 |
+
2587
|
1767 |
+
2588
|
1768 |
+
2589
|
1769 |
+
259
|
1770 |
+
2590
|
1771 |
+
2591
|
1772 |
+
2592
|
1773 |
+
2593
|
1774 |
+
2594
|
1775 |
+
2595
|
1776 |
+
2596
|
1777 |
+
2597
|
1778 |
+
2598
|
1779 |
+
2599
|
1780 |
+
26
|
1781 |
+
260
|
1782 |
+
2600
|
1783 |
+
2601
|
1784 |
+
2602
|
1785 |
+
2603
|
1786 |
+
2604
|
1787 |
+
2605
|
1788 |
+
2606
|
1789 |
+
2607
|
1790 |
+
2608
|
1791 |
+
2609
|
1792 |
+
261
|
1793 |
+
2610
|
1794 |
+
2611
|
1795 |
+
2612
|
1796 |
+
2613
|
1797 |
+
2614
|
1798 |
+
2615
|
1799 |
+
2616
|
1800 |
+
2617
|
1801 |
+
2618
|
1802 |
+
2619
|
1803 |
+
262
|
1804 |
+
2620
|
1805 |
+
2621
|
1806 |
+
2622
|
1807 |
+
2623
|
1808 |
+
2624
|
1809 |
+
2625
|
1810 |
+
2626
|
1811 |
+
2627
|
1812 |
+
2628
|
1813 |
+
2629
|
1814 |
+
263
|
1815 |
+
2630
|
1816 |
+
2631
|
1817 |
+
2632
|
1818 |
+
2633
|
1819 |
+
2634
|
1820 |
+
2635
|
1821 |
+
2636
|
1822 |
+
2637
|
1823 |
+
2638
|
1824 |
+
2639
|
1825 |
+
264
|
1826 |
+
2640
|
1827 |
+
2641
|
1828 |
+
2642
|
1829 |
+
2643
|
1830 |
+
2644
|
1831 |
+
2645
|
1832 |
+
2646
|
1833 |
+
2647
|
1834 |
+
2648
|
1835 |
+
2649
|
1836 |
+
265
|
1837 |
+
2650
|
1838 |
+
2651
|
1839 |
+
2652
|
1840 |
+
2653
|
1841 |
+
2654
|
1842 |
+
2655
|
1843 |
+
2656
|
1844 |
+
2657
|
1845 |
+
2658
|
1846 |
+
2659
|
1847 |
+
266
|
1848 |
+
2660
|
1849 |
+
2661
|
1850 |
+
2662
|
1851 |
+
2663
|
1852 |
+
2664
|
1853 |
+
2665
|
1854 |
+
2666
|
1855 |
+
2667
|
1856 |
+
2668
|
1857 |
+
2669
|
1858 |
+
267
|
1859 |
+
2670
|
1860 |
+
2671
|
1861 |
+
2672
|
1862 |
+
2673
|
1863 |
+
2674
|
1864 |
+
2675
|
1865 |
+
2676
|
1866 |
+
2677
|
1867 |
+
2678
|
1868 |
+
2679
|
1869 |
+
268
|
1870 |
+
2680
|
1871 |
+
2681
|
1872 |
+
2682
|
1873 |
+
2683
|
1874 |
+
2684
|
1875 |
+
2685
|
1876 |
+
2686
|
1877 |
+
2687
|
1878 |
+
2688
|
1879 |
+
2689
|
1880 |
+
269
|
1881 |
+
2690
|
1882 |
+
2691
|
1883 |
+
2692
|
1884 |
+
2693
|
1885 |
+
2694
|
1886 |
+
2695
|
1887 |
+
2696
|
1888 |
+
2697
|
1889 |
+
2698
|
1890 |
+
2699
|
1891 |
+
27
|
1892 |
+
270
|
1893 |
+
2700
|
1894 |
+
2701
|
1895 |
+
2702
|
1896 |
+
2703
|
1897 |
+
2704
|
1898 |
+
2705
|
1899 |
+
2706
|
1900 |
+
2707
|
1901 |
+
2708
|
1902 |
+
2709
|
1903 |
+
271
|
1904 |
+
2710
|
1905 |
+
2711
|
1906 |
+
2712
|
1907 |
+
2713
|
1908 |
+
2714
|
1909 |
+
2715
|
1910 |
+
2716
|
1911 |
+
2717
|
1912 |
+
2718
|
1913 |
+
2719
|
1914 |
+
272
|
1915 |
+
2720
|
1916 |
+
2721
|
1917 |
+
2722
|
1918 |
+
2723
|
1919 |
+
2724
|
1920 |
+
2725
|
1921 |
+
2726
|
1922 |
+
2727
|
1923 |
+
2728
|
1924 |
+
2729
|
1925 |
+
273
|
1926 |
+
2730
|
1927 |
+
2731
|
1928 |
+
2732
|
1929 |
+
2733
|
1930 |
+
2734
|
1931 |
+
2735
|
1932 |
+
2736
|
1933 |
+
2737
|
1934 |
+
2738
|
1935 |
+
2739
|
1936 |
+
274
|
1937 |
+
2740
|
1938 |
+
2741
|
1939 |
+
2742
|
1940 |
+
2743
|
1941 |
+
2744
|
1942 |
+
2745
|
1943 |
+
2746
|
1944 |
+
2747
|
1945 |
+
2748
|
1946 |
+
2749
|
1947 |
+
275
|
1948 |
+
2750
|
1949 |
+
2751
|
1950 |
+
2752
|
1951 |
+
2753
|
1952 |
+
2754
|
1953 |
+
2755
|
1954 |
+
2756
|
1955 |
+
2757
|
1956 |
+
2758
|
1957 |
+
2759
|
1958 |
+
276
|
1959 |
+
2760
|
1960 |
+
2761
|
1961 |
+
2762
|
1962 |
+
2763
|
1963 |
+
2764
|
1964 |
+
2765
|
1965 |
+
2766
|
1966 |
+
2767
|
1967 |
+
2768
|
1968 |
+
2769
|
1969 |
+
277
|
1970 |
+
2770
|
1971 |
+
2771
|
1972 |
+
2772
|
1973 |
+
2773
|
1974 |
+
2774
|
1975 |
+
2775
|
1976 |
+
2776
|
1977 |
+
2777
|
1978 |
+
2778
|
1979 |
+
2779
|
1980 |
+
278
|
1981 |
+
2780
|
1982 |
+
2781
|
1983 |
+
2782
|
1984 |
+
2783
|
1985 |
+
2784
|
1986 |
+
2785
|
1987 |
+
2786
|
1988 |
+
2787
|
1989 |
+
2788
|
1990 |
+
2789
|
1991 |
+
279
|
1992 |
+
2790
|
1993 |
+
2791
|
1994 |
+
2792
|
1995 |
+
2793
|
1996 |
+
2794
|
1997 |
+
2795
|
1998 |
+
2796
|
1999 |
+
2797
|
2000 |
+
2798
|
2001 |
+
2799
|
2002 |
+
28
|
2003 |
+
280
|
2004 |
+
2800
|
2005 |
+
2801
|
2006 |
+
2802
|
2007 |
+
2803
|
2008 |
+
2804
|
2009 |
+
2805
|
2010 |
+
2806
|
2011 |
+
2807
|
2012 |
+
2808
|
2013 |
+
2809
|
2014 |
+
281
|
2015 |
+
2810
|
2016 |
+
2811
|
2017 |
+
2812
|
2018 |
+
2813
|
2019 |
+
2814
|
2020 |
+
2815
|
2021 |
+
2816
|
2022 |
+
2817
|
2023 |
+
2818
|
2024 |
+
2819
|
2025 |
+
282
|
2026 |
+
2820
|
2027 |
+
2821
|
2028 |
+
2822
|
2029 |
+
2823
|
2030 |
+
2824
|
2031 |
+
2825
|
2032 |
+
2826
|
2033 |
+
2827
|
2034 |
+
2828
|
2035 |
+
2829
|
2036 |
+
283
|
2037 |
+
2830
|
2038 |
+
2831
|
2039 |
+
2832
|
2040 |
+
2833
|
2041 |
+
2834
|
2042 |
+
2835
|
2043 |
+
2836
|
2044 |
+
2837
|
2045 |
+
2838
|
2046 |
+
2839
|
2047 |
+
284
|
2048 |
+
2840
|
2049 |
+
2841
|
2050 |
+
2842
|
2051 |
+
2843
|
2052 |
+
2844
|
2053 |
+
2845
|
2054 |
+
2846
|
2055 |
+
2847
|
2056 |
+
2848
|
2057 |
+
2849
|
2058 |
+
285
|
2059 |
+
2850
|
2060 |
+
2851
|
2061 |
+
2852
|
2062 |
+
2853
|
2063 |
+
2854
|
2064 |
+
2855
|
2065 |
+
2856
|
2066 |
+
2857
|
2067 |
+
2858
|
2068 |
+
2859
|
2069 |
+
286
|
2070 |
+
2860
|
2071 |
+
2861
|
2072 |
+
2862
|
2073 |
+
2863
|
2074 |
+
2864
|
2075 |
+
2865
|
2076 |
+
2866
|
2077 |
+
2867
|
2078 |
+
2868
|
2079 |
+
2869
|
2080 |
+
287
|
2081 |
+
2870
|
2082 |
+
2871
|
2083 |
+
2872
|
2084 |
+
2873
|
2085 |
+
2874
|
2086 |
+
2875
|
2087 |
+
2876
|
2088 |
+
2877
|
2089 |
+
2878
|
2090 |
+
2879
|
2091 |
+
288
|
2092 |
+
2880
|
2093 |
+
2881
|
2094 |
+
2882
|
2095 |
+
2883
|
2096 |
+
2884
|
2097 |
+
2885
|
2098 |
+
2886
|
2099 |
+
2887
|
2100 |
+
2888
|
2101 |
+
2889
|
2102 |
+
289
|
2103 |
+
2890
|
2104 |
+
2891
|
2105 |
+
2892
|
2106 |
+
2893
|
2107 |
+
2894
|
2108 |
+
2895
|
2109 |
+
2896
|
2110 |
+
2897
|
2111 |
+
2898
|
2112 |
+
2899
|
2113 |
+
29
|
2114 |
+
290
|
2115 |
+
2900
|
2116 |
+
2901
|
2117 |
+
2902
|
2118 |
+
2903
|
2119 |
+
2904
|
2120 |
+
2905
|
2121 |
+
2906
|
2122 |
+
2907
|
2123 |
+
2908
|
2124 |
+
2909
|
2125 |
+
291
|
2126 |
+
2910
|
2127 |
+
2911
|
2128 |
+
2912
|
2129 |
+
2913
|
2130 |
+
2914
|
2131 |
+
2915
|
2132 |
+
2916
|
2133 |
+
2917
|
2134 |
+
2918
|
2135 |
+
2919
|
2136 |
+
292
|
2137 |
+
2920
|
2138 |
+
2921
|
2139 |
+
2922
|
2140 |
+
2923
|
2141 |
+
2924
|
2142 |
+
2925
|
2143 |
+
2926
|
2144 |
+
2927
|
2145 |
+
2928
|
2146 |
+
2929
|
2147 |
+
293
|
2148 |
+
2930
|
2149 |
+
2931
|
2150 |
+
2932
|
2151 |
+
2933
|
2152 |
+
2934
|
2153 |
+
2935
|
2154 |
+
2936
|
2155 |
+
2937
|
2156 |
+
2938
|
2157 |
+
2939
|
2158 |
+
294
|
2159 |
+
2940
|
2160 |
+
2941
|
2161 |
+
2942
|
2162 |
+
2943
|
2163 |
+
2944
|
2164 |
+
2945
|
2165 |
+
2946
|
2166 |
+
2947
|
2167 |
+
2948
|
2168 |
+
2949
|
2169 |
+
295
|
2170 |
+
2950
|
2171 |
+
2951
|
2172 |
+
2952
|
2173 |
+
2953
|
2174 |
+
2954
|
2175 |
+
2955
|
2176 |
+
2956
|
2177 |
+
2957
|
2178 |
+
2958
|
2179 |
+
2959
|
2180 |
+
296
|
2181 |
+
2960
|
2182 |
+
2961
|
2183 |
+
2962
|
2184 |
+
2963
|
2185 |
+
2964
|
2186 |
+
2965
|
2187 |
+
2966
|
2188 |
+
2967
|
2189 |
+
2968
|
2190 |
+
2969
|
2191 |
+
297
|
2192 |
+
2970
|
2193 |
+
2971
|
2194 |
+
2972
|
2195 |
+
2973
|
2196 |
+
2974
|
2197 |
+
2975
|
2198 |
+
2976
|
2199 |
+
2977
|
2200 |
+
2978
|
2201 |
+
2979
|
2202 |
+
298
|
2203 |
+
2980
|
2204 |
+
2981
|
2205 |
+
2982
|
2206 |
+
2983
|
2207 |
+
2984
|
2208 |
+
2985
|
2209 |
+
2986
|
2210 |
+
2987
|
2211 |
+
2988
|
2212 |
+
2989
|
2213 |
+
299
|
2214 |
+
2990
|
2215 |
+
2991
|
2216 |
+
2992
|
2217 |
+
2993
|
2218 |
+
2994
|
2219 |
+
2995
|
2220 |
+
2996
|
2221 |
+
2997
|
2222 |
+
2998
|
2223 |
+
2999
|
2224 |
+
3
|
2225 |
+
30
|
2226 |
+
300
|
2227 |
+
3000
|
2228 |
+
3001
|
2229 |
+
3002
|
2230 |
+
3003
|
2231 |
+
3004
|
2232 |
+
3005
|
2233 |
+
3006
|
2234 |
+
3007
|
2235 |
+
3008
|
2236 |
+
3009
|
2237 |
+
301
|
2238 |
+
3010
|
2239 |
+
3011
|
2240 |
+
3012
|
2241 |
+
3013
|
2242 |
+
3014
|
2243 |
+
3015
|
2244 |
+
3016
|
2245 |
+
3017
|
2246 |
+
3018
|
2247 |
+
3019
|
2248 |
+
302
|
2249 |
+
3020
|
2250 |
+
3021
|
2251 |
+
3022
|
2252 |
+
3023
|
2253 |
+
3024
|
2254 |
+
3025
|
2255 |
+
3026
|
2256 |
+
3027
|
2257 |
+
3028
|
2258 |
+
3029
|
2259 |
+
303
|
2260 |
+
3030
|
2261 |
+
3031
|
2262 |
+
3032
|
2263 |
+
3033
|
2264 |
+
3034
|
2265 |
+
3035
|
2266 |
+
3036
|
2267 |
+
3037
|
2268 |
+
3038
|
2269 |
+
3039
|
2270 |
+
304
|
2271 |
+
3040
|
2272 |
+
3041
|
2273 |
+
3042
|
2274 |
+
3043
|
2275 |
+
3044
|
2276 |
+
3045
|
2277 |
+
3046
|
2278 |
+
3047
|
2279 |
+
3048
|
2280 |
+
3049
|
2281 |
+
305
|
2282 |
+
3050
|
2283 |
+
3051
|
2284 |
+
3052
|
2285 |
+
3053
|
2286 |
+
3054
|
2287 |
+
3055
|
2288 |
+
3056
|
2289 |
+
3057
|
2290 |
+
3058
|
2291 |
+
3059
|
2292 |
+
306
|
2293 |
+
3060
|
2294 |
+
3061
|
2295 |
+
3062
|
2296 |
+
3063
|
2297 |
+
3064
|
2298 |
+
3065
|
2299 |
+
3066
|
2300 |
+
3067
|
2301 |
+
3068
|
2302 |
+
3069
|
2303 |
+
307
|
2304 |
+
3070
|
2305 |
+
3071
|
2306 |
+
3072
|
2307 |
+
3073
|
2308 |
+
3074
|
2309 |
+
3075
|
2310 |
+
3076
|
2311 |
+
3077
|
2312 |
+
3078
|
2313 |
+
3079
|
2314 |
+
308
|
2315 |
+
3080
|
2316 |
+
3081
|
2317 |
+
3082
|
2318 |
+
3083
|
2319 |
+
3084
|
2320 |
+
3085
|
2321 |
+
3086
|
2322 |
+
3087
|
2323 |
+
3088
|
2324 |
+
3089
|
2325 |
+
309
|
2326 |
+
3090
|
2327 |
+
3091
|
2328 |
+
3092
|
2329 |
+
3093
|
2330 |
+
3094
|
2331 |
+
3095
|
2332 |
+
3096
|
2333 |
+
3097
|
2334 |
+
3098
|
2335 |
+
3099
|
2336 |
+
31
|
2337 |
+
310
|
2338 |
+
3100
|
2339 |
+
3101
|
2340 |
+
3102
|
2341 |
+
3103
|
2342 |
+
3104
|
2343 |
+
3105
|
2344 |
+
3106
|
2345 |
+
3107
|
2346 |
+
3108
|
2347 |
+
3109
|
2348 |
+
311
|
2349 |
+
3110
|
2350 |
+
3111
|
2351 |
+
3112
|
2352 |
+
3113
|
2353 |
+
3114
|
2354 |
+
3115
|
2355 |
+
3116
|
2356 |
+
3117
|
2357 |
+
3118
|
2358 |
+
3119
|
2359 |
+
312
|
2360 |
+
3120
|
2361 |
+
3121
|
2362 |
+
3122
|
2363 |
+
3123
|
2364 |
+
3124
|
2365 |
+
3125
|
2366 |
+
3126
|
2367 |
+
3127
|
2368 |
+
3128
|
2369 |
+
3129
|
2370 |
+
313
|
2371 |
+
3130
|
2372 |
+
3131
|
2373 |
+
3132
|
2374 |
+
3133
|
2375 |
+
3134
|
2376 |
+
3135
|
2377 |
+
3136
|
2378 |
+
3137
|
2379 |
+
3138
|
2380 |
+
3139
|
2381 |
+
314
|
2382 |
+
3140
|
2383 |
+
3141
|
2384 |
+
3142
|
2385 |
+
3143
|
2386 |
+
3144
|
2387 |
+
3145
|
2388 |
+
3146
|
2389 |
+
3147
|
2390 |
+
3148
|
2391 |
+
3149
|
2392 |
+
315
|
2393 |
+
3150
|
2394 |
+
3151
|
2395 |
+
3152
|
2396 |
+
3153
|
2397 |
+
3154
|
2398 |
+
3155
|
2399 |
+
3156
|
2400 |
+
3157
|
2401 |
+
3158
|
2402 |
+
3159
|
2403 |
+
316
|
2404 |
+
3160
|
2405 |
+
3161
|
2406 |
+
3162
|
2407 |
+
3163
|
2408 |
+
3164
|
2409 |
+
3165
|
2410 |
+
3166
|
2411 |
+
3167
|
2412 |
+
3168
|
2413 |
+
3169
|
2414 |
+
317
|
2415 |
+
3170
|
2416 |
+
3171
|
2417 |
+
3172
|
2418 |
+
3173
|
2419 |
+
3174
|
2420 |
+
3175
|
2421 |
+
3176
|
2422 |
+
3177
|
2423 |
+
3178
|
2424 |
+
3179
|
2425 |
+
318
|
2426 |
+
3180
|
2427 |
+
3181
|
2428 |
+
3182
|
2429 |
+
3183
|
2430 |
+
3184
|
2431 |
+
3185
|
2432 |
+
3186
|
2433 |
+
3187
|
2434 |
+
3188
|
2435 |
+
3189
|
2436 |
+
319
|
2437 |
+
3190
|
2438 |
+
3191
|
2439 |
+
3192
|
2440 |
+
3193
|
2441 |
+
3194
|
2442 |
+
3195
|
2443 |
+
3196
|
2444 |
+
3197
|
2445 |
+
3198
|
2446 |
+
3199
|
2447 |
+
32
|
2448 |
+
320
|
2449 |
+
3200
|
2450 |
+
3201
|
2451 |
+
3202
|
2452 |
+
3203
|
2453 |
+
3204
|
2454 |
+
3205
|
2455 |
+
3206
|
2456 |
+
3207
|
2457 |
+
3208
|
2458 |
+
3209
|
2459 |
+
321
|
2460 |
+
3210
|
2461 |
+
3211
|
2462 |
+
3212
|
2463 |
+
3213
|
2464 |
+
3214
|
2465 |
+
3215
|
2466 |
+
3216
|
2467 |
+
3217
|
2468 |
+
3218
|
2469 |
+
3219
|
2470 |
+
322
|
2471 |
+
3220
|
2472 |
+
3221
|
2473 |
+
3222
|
2474 |
+
3223
|
2475 |
+
3224
|
2476 |
+
3225
|
2477 |
+
3226
|
2478 |
+
3227
|
2479 |
+
3228
|
2480 |
+
3229
|
2481 |
+
323
|
2482 |
+
3230
|
2483 |
+
3231
|
2484 |
+
3232
|
2485 |
+
3233
|
2486 |
+
3234
|
2487 |
+
3235
|
2488 |
+
3236
|
2489 |
+
3237
|
2490 |
+
3238
|
2491 |
+
3239
|
2492 |
+
324
|
2493 |
+
3240
|
2494 |
+
3241
|
2495 |
+
3242
|
2496 |
+
3243
|
2497 |
+
3244
|
2498 |
+
3245
|
2499 |
+
3246
|
2500 |
+
3247
|
2501 |
+
3248
|
2502 |
+
3249
|
2503 |
+
325
|
2504 |
+
3250
|
2505 |
+
3251
|
2506 |
+
3252
|
2507 |
+
3253
|
2508 |
+
3254
|
2509 |
+
3255
|
2510 |
+
3256
|
2511 |
+
3257
|
2512 |
+
3258
|
2513 |
+
3259
|
2514 |
+
326
|
2515 |
+
3260
|
2516 |
+
3261
|
2517 |
+
3262
|
2518 |
+
3263
|
2519 |
+
3264
|
2520 |
+
3265
|
2521 |
+
3266
|
2522 |
+
3267
|
2523 |
+
3268
|
2524 |
+
3269
|
2525 |
+
327
|
2526 |
+
3270
|
2527 |
+
3271
|
2528 |
+
3272
|
2529 |
+
3273
|
2530 |
+
3274
|
2531 |
+
3275
|
2532 |
+
3276
|
2533 |
+
3277
|
2534 |
+
3278
|
2535 |
+
3279
|
2536 |
+
328
|
2537 |
+
3280
|
2538 |
+
3281
|
2539 |
+
3282
|
2540 |
+
3283
|
2541 |
+
3284
|
2542 |
+
3285
|
2543 |
+
3286
|
2544 |
+
3287
|
2545 |
+
3288
|
2546 |
+
3289
|
2547 |
+
329
|
2548 |
+
3290
|
2549 |
+
3291
|
2550 |
+
3292
|
2551 |
+
3293
|
2552 |
+
3294
|
2553 |
+
3295
|
2554 |
+
3296
|
2555 |
+
3297
|
2556 |
+
3298
|
2557 |
+
3299
|
2558 |
+
33
|
2559 |
+
330
|
2560 |
+
3300
|
2561 |
+
3301
|
2562 |
+
3302
|
2563 |
+
3303
|
2564 |
+
3304
|
2565 |
+
3305
|
2566 |
+
3306
|
2567 |
+
3307
|
2568 |
+
3308
|
2569 |
+
3309
|
2570 |
+
331
|
2571 |
+
3310
|
2572 |
+
3311
|
2573 |
+
3312
|
2574 |
+
3313
|
2575 |
+
3314
|
2576 |
+
3315
|
2577 |
+
3316
|
2578 |
+
3317
|
2579 |
+
3318
|
2580 |
+
3319
|
2581 |
+
332
|
2582 |
+
3320
|
2583 |
+
3321
|
2584 |
+
3322
|
2585 |
+
3323
|
2586 |
+
3324
|
2587 |
+
3325
|
2588 |
+
3326
|
2589 |
+
3327
|
2590 |
+
3328
|
2591 |
+
3329
|
2592 |
+
333
|
2593 |
+
3330
|
2594 |
+
3331
|
2595 |
+
3332
|
2596 |
+
3333
|
2597 |
+
3334
|
2598 |
+
3335
|
2599 |
+
3336
|
2600 |
+
3337
|
2601 |
+
3338
|
2602 |
+
3339
|
2603 |
+
334
|
2604 |
+
3340
|
2605 |
+
3341
|
2606 |
+
3342
|
2607 |
+
3343
|
2608 |
+
3344
|
2609 |
+
3345
|
2610 |
+
3346
|
2611 |
+
3347
|
2612 |
+
3348
|
2613 |
+
3349
|
2614 |
+
335
|
2615 |
+
3350
|
2616 |
+
3351
|
2617 |
+
3352
|
2618 |
+
3353
|
2619 |
+
3354
|
2620 |
+
3355
|
2621 |
+
3356
|
2622 |
+
3357
|
2623 |
+
3358
|
2624 |
+
3359
|
2625 |
+
336
|
2626 |
+
3360
|
2627 |
+
3361
|
2628 |
+
3362
|
2629 |
+
3363
|
2630 |
+
3364
|
2631 |
+
3365
|
2632 |
+
3366
|
2633 |
+
3367
|
2634 |
+
3368
|
2635 |
+
3369
|
2636 |
+
337
|
2637 |
+
3370
|
2638 |
+
3371
|
2639 |
+
3372
|
2640 |
+
3373
|
2641 |
+
3374
|
2642 |
+
3375
|
2643 |
+
3376
|
2644 |
+
3377
|
2645 |
+
3378
|
2646 |
+
3379
|
2647 |
+
338
|
2648 |
+
3380
|
2649 |
+
3381
|
2650 |
+
3382
|
2651 |
+
3383
|
2652 |
+
3384
|
2653 |
+
3385
|
2654 |
+
3386
|
2655 |
+
3387
|
2656 |
+
3388
|
2657 |
+
3389
|
2658 |
+
339
|
2659 |
+
3390
|
2660 |
+
3391
|
2661 |
+
3392
|
2662 |
+
3393
|
2663 |
+
3394
|
2664 |
+
3395
|
2665 |
+
3396
|
2666 |
+
3397
|
2667 |
+
3398
|
2668 |
+
3399
|
2669 |
+
34
|
2670 |
+
340
|
2671 |
+
3400
|
2672 |
+
3401
|
2673 |
+
3402
|
2674 |
+
3403
|
2675 |
+
3404
|
2676 |
+
3405
|
2677 |
+
3406
|
2678 |
+
3407
|
2679 |
+
3408
|
2680 |
+
3409
|
2681 |
+
341
|
2682 |
+
3410
|
2683 |
+
3411
|
2684 |
+
3412
|
2685 |
+
3413
|
2686 |
+
3414
|
2687 |
+
3415
|
2688 |
+
3416
|
2689 |
+
3417
|
2690 |
+
3418
|
2691 |
+
3419
|
2692 |
+
342
|
2693 |
+
3420
|
2694 |
+
3421
|
2695 |
+
3422
|
2696 |
+
3423
|
2697 |
+
3424
|
2698 |
+
3425
|
2699 |
+
3426
|
2700 |
+
3427
|
2701 |
+
3428
|
2702 |
+
3429
|
2703 |
+
343
|
2704 |
+
3430
|
2705 |
+
3431
|
2706 |
+
3432
|
2707 |
+
3433
|
2708 |
+
3434
|
2709 |
+
3435
|
2710 |
+
3436
|
2711 |
+
3437
|
2712 |
+
3438
|
2713 |
+
3439
|
2714 |
+
344
|
2715 |
+
3440
|
2716 |
+
3441
|
2717 |
+
3442
|
2718 |
+
3443
|
2719 |
+
3444
|
2720 |
+
3445
|
2721 |
+
3446
|
2722 |
+
3447
|
2723 |
+
3448
|
2724 |
+
3449
|
2725 |
+
345
|
2726 |
+
3450
|
2727 |
+
3451
|
2728 |
+
3452
|
2729 |
+
3453
|
2730 |
+
3454
|
2731 |
+
3455
|
2732 |
+
3456
|
2733 |
+
3457
|
2734 |
+
3458
|
2735 |
+
3459
|
2736 |
+
346
|
2737 |
+
3460
|
2738 |
+
3461
|
2739 |
+
3462
|
2740 |
+
3463
|
2741 |
+
3464
|
2742 |
+
3465
|
2743 |
+
3466
|
2744 |
+
3467
|
2745 |
+
3468
|
2746 |
+
3469
|
2747 |
+
347
|
2748 |
+
3470
|
2749 |
+
3471
|
2750 |
+
3472
|
2751 |
+
3473
|
2752 |
+
3474
|
2753 |
+
3475
|
2754 |
+
3476
|
2755 |
+
3477
|
2756 |
+
3478
|
2757 |
+
3479
|
2758 |
+
348
|
2759 |
+
3480
|
2760 |
+
3481
|
2761 |
+
3482
|
2762 |
+
3483
|
2763 |
+
3484
|
2764 |
+
3485
|
2765 |
+
3486
|
2766 |
+
3487
|
2767 |
+
3488
|
2768 |
+
3489
|
2769 |
+
349
|
2770 |
+
3490
|
2771 |
+
3491
|
2772 |
+
3492
|
2773 |
+
3493
|
2774 |
+
3494
|
2775 |
+
3495
|
2776 |
+
3496
|
2777 |
+
3497
|
2778 |
+
3498
|
2779 |
+
3499
|
2780 |
+
35
|
2781 |
+
350
|
2782 |
+
3500
|
2783 |
+
3501
|
2784 |
+
3502
|
2785 |
+
3503
|
2786 |
+
3504
|
2787 |
+
3505
|
2788 |
+
3506
|
2789 |
+
3507
|
2790 |
+
3508
|
2791 |
+
3509
|
2792 |
+
351
|
2793 |
+
3510
|
2794 |
+
3511
|
2795 |
+
3512
|
2796 |
+
3513
|
2797 |
+
3514
|
2798 |
+
3515
|
2799 |
+
3516
|
2800 |
+
3517
|
2801 |
+
3518
|
2802 |
+
3519
|
2803 |
+
352
|
2804 |
+
3520
|
2805 |
+
3521
|
2806 |
+
3522
|
2807 |
+
3523
|
2808 |
+
3524
|
2809 |
+
3525
|
2810 |
+
3526
|
2811 |
+
3527
|
2812 |
+
3528
|
2813 |
+
3529
|
2814 |
+
353
|
2815 |
+
3530
|
2816 |
+
3531
|
2817 |
+
3532
|
2818 |
+
3533
|
2819 |
+
3534
|
2820 |
+
3535
|
2821 |
+
3536
|
2822 |
+
3537
|
2823 |
+
3538
|
2824 |
+
3539
|
2825 |
+
354
|
2826 |
+
3540
|
2827 |
+
3541
|
2828 |
+
3542
|
2829 |
+
3543
|
2830 |
+
3544
|
2831 |
+
3545
|
2832 |
+
3546
|
2833 |
+
3547
|
2834 |
+
3548
|
2835 |
+
3549
|
2836 |
+
355
|
2837 |
+
3550
|
2838 |
+
3551
|
2839 |
+
3552
|
2840 |
+
3553
|
2841 |
+
3554
|
2842 |
+
3555
|
2843 |
+
3556
|
2844 |
+
3557
|
2845 |
+
3558
|
2846 |
+
3559
|
2847 |
+
356
|
2848 |
+
3560
|
2849 |
+
3561
|
2850 |
+
3562
|
2851 |
+
3563
|
2852 |
+
3564
|
2853 |
+
3565
|
2854 |
+
3566
|
2855 |
+
3567
|
2856 |
+
3568
|
2857 |
+
3569
|
2858 |
+
357
|
2859 |
+
3570
|
2860 |
+
3571
|
2861 |
+
3572
|
2862 |
+
3573
|
2863 |
+
3574
|
2864 |
+
3575
|
2865 |
+
3576
|
2866 |
+
3577
|
2867 |
+
3578
|
2868 |
+
3579
|
2869 |
+
358
|
2870 |
+
3580
|
2871 |
+
3581
|
2872 |
+
3582
|
2873 |
+
3583
|
2874 |
+
3584
|
2875 |
+
3585
|
2876 |
+
3586
|
2877 |
+
3587
|
2878 |
+
3588
|
2879 |
+
3589
|
2880 |
+
359
|
2881 |
+
3590
|
2882 |
+
3591
|
2883 |
+
3592
|
2884 |
+
3593
|
2885 |
+
3594
|
2886 |
+
3595
|
2887 |
+
3596
|
2888 |
+
3597
|
2889 |
+
3598
|
2890 |
+
3599
|
2891 |
+
36
|
2892 |
+
360
|
2893 |
+
3600
|
2894 |
+
3601
|
2895 |
+
3602
|
2896 |
+
3603
|
2897 |
+
3604
|
2898 |
+
3605
|
2899 |
+
3606
|
2900 |
+
3607
|
2901 |
+
3608
|
2902 |
+
3609
|
2903 |
+
361
|
2904 |
+
3610
|
2905 |
+
3611
|
2906 |
+
3612
|
2907 |
+
3613
|
2908 |
+
3614
|
2909 |
+
3615
|
2910 |
+
3616
|
2911 |
+
3617
|
2912 |
+
3618
|
2913 |
+
3619
|
2914 |
+
362
|
2915 |
+
3620
|
2916 |
+
3621
|
2917 |
+
3622
|
2918 |
+
3623
|
2919 |
+
3624
|
2920 |
+
3625
|
2921 |
+
3626
|
2922 |
+
3627
|
2923 |
+
3628
|
2924 |
+
3629
|
2925 |
+
363
|
2926 |
+
3630
|
2927 |
+
3631
|
2928 |
+
3632
|
2929 |
+
3633
|
2930 |
+
3634
|
2931 |
+
3635
|
2932 |
+
3636
|
2933 |
+
3637
|
2934 |
+
3638
|
2935 |
+
3639
|
2936 |
+
364
|
2937 |
+
3640
|
2938 |
+
3641
|
2939 |
+
3642
|
2940 |
+
3643
|
2941 |
+
3644
|
2942 |
+
3645
|
2943 |
+
3646
|
2944 |
+
3647
|
2945 |
+
3648
|
2946 |
+
3649
|
2947 |
+
365
|
2948 |
+
3650
|
2949 |
+
3651
|
2950 |
+
3652
|
2951 |
+
3653
|
2952 |
+
3654
|
2953 |
+
3655
|
2954 |
+
3656
|
2955 |
+
3657
|
2956 |
+
3658
|
2957 |
+
3659
|
2958 |
+
366
|
2959 |
+
3660
|
2960 |
+
3661
|
2961 |
+
3662
|
2962 |
+
3663
|
2963 |
+
3664
|
2964 |
+
3665
|
2965 |
+
3666
|
2966 |
+
3667
|
2967 |
+
3668
|
2968 |
+
3669
|
2969 |
+
367
|
2970 |
+
3670
|
2971 |
+
3671
|
2972 |
+
3672
|
2973 |
+
3673
|
2974 |
+
3674
|
2975 |
+
3675
|
2976 |
+
3676
|
2977 |
+
3677
|
2978 |
+
3678
|
2979 |
+
3679
|
2980 |
+
368
|
2981 |
+
3680
|
2982 |
+
3681
|
2983 |
+
3682
|
2984 |
+
3683
|
2985 |
+
3684
|
2986 |
+
3685
|
2987 |
+
3686
|
2988 |
+
3687
|
2989 |
+
3688
|
2990 |
+
3689
|
2991 |
+
369
|
2992 |
+
3690
|
2993 |
+
3691
|
2994 |
+
3692
|
2995 |
+
3693
|
2996 |
+
3694
|
2997 |
+
3695
|
2998 |
+
3696
|
2999 |
+
3697
|
3000 |
+
3698
|
3001 |
+
3699
|
3002 |
+
37
|
3003 |
+
370
|
3004 |
+
3700
|
3005 |
+
3701
|
3006 |
+
3702
|
3007 |
+
3703
|
3008 |
+
3704
|
3009 |
+
3705
|
3010 |
+
3706
|
3011 |
+
3707
|
3012 |
+
3708
|
3013 |
+
3709
|
3014 |
+
371
|
3015 |
+
3710
|
3016 |
+
3711
|
3017 |
+
3712
|
3018 |
+
3713
|
3019 |
+
3714
|
3020 |
+
3715
|
3021 |
+
3716
|
3022 |
+
3717
|
3023 |
+
3718
|
3024 |
+
3719
|
3025 |
+
372
|
3026 |
+
3720
|
3027 |
+
3721
|
3028 |
+
3722
|
3029 |
+
3723
|
3030 |
+
3724
|
3031 |
+
3725
|
3032 |
+
3726
|
3033 |
+
3727
|
3034 |
+
3728
|
3035 |
+
3729
|
3036 |
+
373
|
3037 |
+
3730
|
3038 |
+
3731
|
3039 |
+
3732
|
3040 |
+
3733
|
3041 |
+
3734
|
3042 |
+
3735
|
3043 |
+
3736
|
3044 |
+
3737
|
3045 |
+
3738
|
3046 |
+
3739
|
3047 |
+
374
|
3048 |
+
3740
|
3049 |
+
3741
|
3050 |
+
3742
|
3051 |
+
3743
|
3052 |
+
3744
|
3053 |
+
3745
|
3054 |
+
3746
|
3055 |
+
3747
|
3056 |
+
3748
|
3057 |
+
3749
|
3058 |
+
375
|
3059 |
+
3750
|
3060 |
+
3751
|
3061 |
+
3752
|
3062 |
+
3753
|
3063 |
+
3754
|
3064 |
+
3755
|
3065 |
+
3756
|
3066 |
+
3757
|
3067 |
+
3758
|
3068 |
+
3759
|
3069 |
+
376
|
3070 |
+
3760
|
3071 |
+
3761
|
3072 |
+
3762
|
3073 |
+
3763
|
3074 |
+
3764
|
3075 |
+
3765
|
3076 |
+
3766
|
3077 |
+
3767
|
3078 |
+
3768
|
3079 |
+
3769
|
3080 |
+
377
|
3081 |
+
3770
|
3082 |
+
3771
|
3083 |
+
3772
|
3084 |
+
3773
|
3085 |
+
3774
|
3086 |
+
3775
|
3087 |
+
3776
|
3088 |
+
3777
|
3089 |
+
3778
|
3090 |
+
3779
|
3091 |
+
378
|
3092 |
+
3780
|
3093 |
+
3781
|
3094 |
+
3782
|
3095 |
+
3783
|
3096 |
+
3784
|
3097 |
+
3785
|
3098 |
+
3786
|
3099 |
+
3787
|
3100 |
+
3788
|
3101 |
+
3789
|
3102 |
+
379
|
3103 |
+
3790
|
3104 |
+
3791
|
3105 |
+
3792
|
3106 |
+
3793
|
3107 |
+
3794
|
3108 |
+
3795
|
3109 |
+
3796
|
3110 |
+
3797
|
3111 |
+
3798
|
3112 |
+
3799
|
3113 |
+
38
|
3114 |
+
380
|
3115 |
+
3800
|
3116 |
+
3801
|
3117 |
+
3802
|
3118 |
+
3803
|
3119 |
+
3804
|
3120 |
+
3805
|
3121 |
+
3806
|
3122 |
+
3807
|
3123 |
+
3808
|
3124 |
+
3809
|
3125 |
+
381
|
3126 |
+
3810
|
3127 |
+
3811
|
3128 |
+
3812
|
3129 |
+
3813
|
3130 |
+
3814
|
3131 |
+
3815
|
3132 |
+
3816
|
3133 |
+
3817
|
3134 |
+
3818
|
3135 |
+
3819
|
3136 |
+
382
|
3137 |
+
3820
|
3138 |
+
3821
|
3139 |
+
3822
|
3140 |
+
3823
|
3141 |
+
3824
|
3142 |
+
3825
|
3143 |
+
3826
|
3144 |
+
3827
|
3145 |
+
3828
|
3146 |
+
3829
|
3147 |
+
383
|
3148 |
+
3830
|
3149 |
+
3831
|
3150 |
+
3832
|
3151 |
+
3833
|
3152 |
+
3834
|
3153 |
+
3835
|
3154 |
+
3836
|
3155 |
+
3837
|
3156 |
+
3838
|
3157 |
+
3839
|
3158 |
+
384
|
3159 |
+
3840
|
3160 |
+
3841
|
3161 |
+
3842
|
3162 |
+
3843
|
3163 |
+
3844
|
3164 |
+
3845
|
3165 |
+
3846
|
3166 |
+
3847
|
3167 |
+
3848
|
3168 |
+
3849
|
3169 |
+
385
|
3170 |
+
3850
|
3171 |
+
3851
|
3172 |
+
3852
|
3173 |
+
3853
|
3174 |
+
3854
|
3175 |
+
3855
|
3176 |
+
3856
|
3177 |
+
3857
|
3178 |
+
3858
|
3179 |
+
3859
|
3180 |
+
386
|
3181 |
+
3860
|
3182 |
+
3861
|
3183 |
+
3862
|
3184 |
+
3863
|
3185 |
+
3864
|
3186 |
+
3865
|
3187 |
+
3866
|
3188 |
+
3867
|
3189 |
+
3868
|
3190 |
+
3869
|
3191 |
+
387
|
3192 |
+
3870
|
3193 |
+
3871
|
3194 |
+
3872
|
3195 |
+
3873
|
3196 |
+
3874
|
3197 |
+
3875
|
3198 |
+
3876
|
3199 |
+
3877
|
3200 |
+
3878
|
3201 |
+
3879
|
3202 |
+
388
|
3203 |
+
3880
|
3204 |
+
3881
|
3205 |
+
3882
|
3206 |
+
3883
|
3207 |
+
3884
|
3208 |
+
3885
|
3209 |
+
3886
|
3210 |
+
3887
|
3211 |
+
3888
|
3212 |
+
3889
|
3213 |
+
389
|
3214 |
+
3890
|
3215 |
+
3891
|
3216 |
+
3892
|
3217 |
+
3893
|
3218 |
+
3894
|
3219 |
+
3895
|
3220 |
+
3896
|
3221 |
+
3897
|
3222 |
+
3898
|
3223 |
+
3899
|
3224 |
+
39
|
3225 |
+
390
|
3226 |
+
3900
|
3227 |
+
3901
|
3228 |
+
3902
|
3229 |
+
3903
|
3230 |
+
3904
|
3231 |
+
3905
|
3232 |
+
3906
|
3233 |
+
3907
|
3234 |
+
3908
|
3235 |
+
3909
|
3236 |
+
391
|
3237 |
+
3910
|
3238 |
+
3911
|
3239 |
+
3912
|
3240 |
+
3913
|
3241 |
+
3914
|
3242 |
+
3915
|
3243 |
+
3916
|
3244 |
+
3917
|
3245 |
+
3918
|
3246 |
+
3919
|
3247 |
+
392
|
3248 |
+
3920
|
3249 |
+
3921
|
3250 |
+
3922
|
3251 |
+
3923
|
3252 |
+
3924
|
3253 |
+
3925
|
3254 |
+
3926
|
3255 |
+
3927
|
3256 |
+
3928
|
3257 |
+
3929
|
3258 |
+
393
|
3259 |
+
3930
|
3260 |
+
3931
|
3261 |
+
3932
|
3262 |
+
3933
|
3263 |
+
3934
|
3264 |
+
3935
|
3265 |
+
3936
|
3266 |
+
3937
|
3267 |
+
3938
|
3268 |
+
3939
|
3269 |
+
394
|
3270 |
+
3940
|
3271 |
+
3941
|
3272 |
+
3942
|
3273 |
+
3943
|
3274 |
+
3944
|
3275 |
+
3945
|
3276 |
+
3946
|
3277 |
+
3947
|
3278 |
+
3948
|
3279 |
+
3949
|
3280 |
+
395
|
3281 |
+
3950
|
3282 |
+
3951
|
3283 |
+
3952
|
3284 |
+
3953
|
3285 |
+
3954
|
3286 |
+
3955
|
3287 |
+
3956
|
3288 |
+
3957
|
3289 |
+
3958
|
3290 |
+
3959
|
3291 |
+
396
|
3292 |
+
3960
|
3293 |
+
3961
|
3294 |
+
3962
|
3295 |
+
3963
|
3296 |
+
3964
|
3297 |
+
3965
|
3298 |
+
3966
|
3299 |
+
3967
|
3300 |
+
3968
|
3301 |
+
3969
|
3302 |
+
397
|
3303 |
+
3970
|
3304 |
+
3971
|
3305 |
+
3972
|
3306 |
+
3973
|
3307 |
+
3974
|
3308 |
+
3975
|
3309 |
+
3976
|
3310 |
+
3977
|
3311 |
+
3978
|
3312 |
+
3979
|
3313 |
+
398
|
3314 |
+
3980
|
3315 |
+
3981
|
3316 |
+
3982
|
3317 |
+
3983
|
3318 |
+
3984
|
3319 |
+
3985
|
3320 |
+
3986
|
3321 |
+
3987
|
3322 |
+
3988
|
3323 |
+
3989
|
3324 |
+
399
|
3325 |
+
3990
|
3326 |
+
3991
|
3327 |
+
3992
|
3328 |
+
3993
|
3329 |
+
3994
|
3330 |
+
3995
|
3331 |
+
3996
|
3332 |
+
3997
|
3333 |
+
3998
|
3334 |
+
3999
|
3335 |
+
4
|
3336 |
+
40
|
3337 |
+
400
|
3338 |
+
4000
|
3339 |
+
4001
|
3340 |
+
4002
|
3341 |
+
4003
|
3342 |
+
4004
|
3343 |
+
4005
|
3344 |
+
4006
|
3345 |
+
4007
|
3346 |
+
4008
|
3347 |
+
4009
|
3348 |
+
401
|
3349 |
+
4010
|
3350 |
+
4011
|
3351 |
+
4012
|
3352 |
+
4013
|
3353 |
+
4014
|
3354 |
+
4015
|
3355 |
+
4016
|
3356 |
+
4017
|
3357 |
+
4018
|
3358 |
+
4019
|
3359 |
+
402
|
3360 |
+
4020
|
3361 |
+
4021
|
3362 |
+
4022
|
3363 |
+
4023
|
3364 |
+
4024
|
3365 |
+
4025
|
3366 |
+
4026
|
3367 |
+
4027
|
3368 |
+
4028
|
3369 |
+
4029
|
3370 |
+
403
|
3371 |
+
4030
|
3372 |
+
4031
|
3373 |
+
4032
|
3374 |
+
4033
|
3375 |
+
4034
|
3376 |
+
4035
|
3377 |
+
4036
|
3378 |
+
4037
|
3379 |
+
4038
|
3380 |
+
4039
|
3381 |
+
404
|
3382 |
+
4040
|
3383 |
+
4041
|
3384 |
+
4042
|
3385 |
+
4043
|
3386 |
+
4044
|
3387 |
+
4045
|
3388 |
+
4046
|
3389 |
+
4047
|
3390 |
+
4048
|
3391 |
+
4049
|
3392 |
+
405
|
3393 |
+
4050
|
3394 |
+
4051
|
3395 |
+
4052
|
3396 |
+
4053
|
3397 |
+
4054
|
3398 |
+
4055
|
3399 |
+
4056
|
3400 |
+
4057
|
3401 |
+
4058
|
3402 |
+
4059
|
3403 |
+
406
|
3404 |
+
4060
|
3405 |
+
4061
|
3406 |
+
4062
|
3407 |
+
4063
|
3408 |
+
4064
|
3409 |
+
4065
|
3410 |
+
4066
|
3411 |
+
4067
|
3412 |
+
4068
|
3413 |
+
4069
|
3414 |
+
407
|
3415 |
+
4070
|
3416 |
+
4071
|
3417 |
+
4072
|
3418 |
+
4073
|
3419 |
+
4074
|
3420 |
+
4075
|
3421 |
+
4076
|
3422 |
+
4077
|
3423 |
+
4078
|
3424 |
+
4079
|
3425 |
+
408
|
3426 |
+
4080
|
3427 |
+
4081
|
3428 |
+
4082
|
3429 |
+
4083
|
3430 |
+
4084
|
3431 |
+
4085
|
3432 |
+
4086
|
3433 |
+
4087
|
3434 |
+
4088
|
3435 |
+
4089
|
3436 |
+
409
|
3437 |
+
4090
|
3438 |
+
4091
|
3439 |
+
4092
|
3440 |
+
4093
|
3441 |
+
4094
|
3442 |
+
4095
|
3443 |
+
41
|
3444 |
+
410
|
3445 |
+
411
|
3446 |
+
412
|
3447 |
+
413
|
3448 |
+
414
|
3449 |
+
415
|
3450 |
+
416
|
3451 |
+
417
|
3452 |
+
418
|
3453 |
+
419
|
3454 |
+
42
|
3455 |
+
420
|
3456 |
+
421
|
3457 |
+
422
|
3458 |
+
423
|
3459 |
+
424
|
3460 |
+
425
|
3461 |
+
426
|
3462 |
+
427
|
3463 |
+
428
|
3464 |
+
429
|
3465 |
+
43
|
3466 |
+
430
|
3467 |
+
431
|
3468 |
+
432
|
3469 |
+
433
|
3470 |
+
434
|
3471 |
+
435
|
3472 |
+
436
|
3473 |
+
437
|
3474 |
+
438
|
3475 |
+
439
|
3476 |
+
44
|
3477 |
+
440
|
3478 |
+
441
|
3479 |
+
442
|
3480 |
+
443
|
3481 |
+
444
|
3482 |
+
445
|
3483 |
+
446
|
3484 |
+
447
|
3485 |
+
448
|
3486 |
+
449
|
3487 |
+
45
|
3488 |
+
450
|
3489 |
+
451
|
3490 |
+
452
|
3491 |
+
453
|
3492 |
+
454
|
3493 |
+
455
|
3494 |
+
456
|
3495 |
+
457
|
3496 |
+
458
|
3497 |
+
459
|
3498 |
+
46
|
3499 |
+
460
|
3500 |
+
461
|
3501 |
+
462
|
3502 |
+
463
|
3503 |
+
464
|
3504 |
+
465
|
3505 |
+
466
|
3506 |
+
467
|
3507 |
+
468
|
3508 |
+
469
|
3509 |
+
47
|
3510 |
+
470
|
3511 |
+
471
|
3512 |
+
472
|
3513 |
+
473
|
3514 |
+
474
|
3515 |
+
475
|
3516 |
+
476
|
3517 |
+
477
|
3518 |
+
478
|
3519 |
+
479
|
3520 |
+
48
|
3521 |
+
480
|
3522 |
+
481
|
3523 |
+
482
|
3524 |
+
483
|
3525 |
+
484
|
3526 |
+
485
|
3527 |
+
486
|
3528 |
+
487
|
3529 |
+
488
|
3530 |
+
489
|
3531 |
+
49
|
3532 |
+
490
|
3533 |
+
491
|
3534 |
+
492
|
3535 |
+
493
|
3536 |
+
494
|
3537 |
+
495
|
3538 |
+
496
|
3539 |
+
497
|
3540 |
+
498
|
3541 |
+
499
|
3542 |
+
5
|
3543 |
+
50
|
3544 |
+
500
|
3545 |
+
501
|
3546 |
+
502
|
3547 |
+
503
|
3548 |
+
504
|
3549 |
+
505
|
3550 |
+
506
|
3551 |
+
507
|
3552 |
+
508
|
3553 |
+
509
|
3554 |
+
51
|
3555 |
+
510
|
3556 |
+
511
|
3557 |
+
512
|
3558 |
+
513
|
3559 |
+
514
|
3560 |
+
515
|
3561 |
+
516
|
3562 |
+
517
|
3563 |
+
518
|
3564 |
+
519
|
3565 |
+
52
|
3566 |
+
520
|
3567 |
+
521
|
3568 |
+
522
|
3569 |
+
523
|
3570 |
+
524
|
3571 |
+
525
|
3572 |
+
526
|
3573 |
+
527
|
3574 |
+
528
|
3575 |
+
529
|
3576 |
+
53
|
3577 |
+
530
|
3578 |
+
531
|
3579 |
+
532
|
3580 |
+
533
|
3581 |
+
534
|
3582 |
+
535
|
3583 |
+
536
|
3584 |
+
537
|
3585 |
+
538
|
3586 |
+
539
|
3587 |
+
54
|
3588 |
+
540
|
3589 |
+
541
|
3590 |
+
542
|
3591 |
+
543
|
3592 |
+
544
|
3593 |
+
545
|
3594 |
+
546
|
3595 |
+
547
|
3596 |
+
548
|
3597 |
+
549
|
3598 |
+
55
|
3599 |
+
550
|
3600 |
+
551
|
3601 |
+
552
|
3602 |
+
553
|
3603 |
+
554
|
3604 |
+
555
|
3605 |
+
556
|
3606 |
+
557
|
3607 |
+
558
|
3608 |
+
559
|
3609 |
+
56
|
3610 |
+
560
|
3611 |
+
561
|
3612 |
+
562
|
3613 |
+
563
|
3614 |
+
564
|
3615 |
+
565
|
3616 |
+
566
|
3617 |
+
567
|
3618 |
+
568
|
3619 |
+
569
|
3620 |
+
57
|
3621 |
+
570
|
3622 |
+
571
|
3623 |
+
572
|
3624 |
+
573
|
3625 |
+
574
|
3626 |
+
575
|
3627 |
+
576
|
3628 |
+
577
|
3629 |
+
578
|
3630 |
+
579
|
3631 |
+
58
|
3632 |
+
580
|
3633 |
+
581
|
3634 |
+
582
|
3635 |
+
583
|
3636 |
+
584
|
3637 |
+
585
|
3638 |
+
586
|
3639 |
+
587
|
3640 |
+
588
|
3641 |
+
589
|
3642 |
+
59
|
3643 |
+
590
|
3644 |
+
591
|
3645 |
+
592
|
3646 |
+
593
|
3647 |
+
594
|
3648 |
+
595
|
3649 |
+
596
|
3650 |
+
597
|
3651 |
+
598
|
3652 |
+
599
|
3653 |
+
6
|
3654 |
+
60
|
3655 |
+
600
|
3656 |
+
601
|
3657 |
+
602
|
3658 |
+
603
|
3659 |
+
604
|
3660 |
+
605
|
3661 |
+
606
|
3662 |
+
607
|
3663 |
+
608
|
3664 |
+
609
|
3665 |
+
61
|
3666 |
+
610
|
3667 |
+
611
|
3668 |
+
612
|
3669 |
+
613
|
3670 |
+
614
|
3671 |
+
615
|
3672 |
+
616
|
3673 |
+
617
|
3674 |
+
618
|
3675 |
+
619
|
3676 |
+
62
|
3677 |
+
620
|
3678 |
+
621
|
3679 |
+
622
|
3680 |
+
623
|
3681 |
+
624
|
3682 |
+
625
|
3683 |
+
626
|
3684 |
+
627
|
3685 |
+
628
|
3686 |
+
629
|
3687 |
+
63
|
3688 |
+
630
|
3689 |
+
631
|
3690 |
+
632
|
3691 |
+
633
|
3692 |
+
634
|
3693 |
+
635
|
3694 |
+
636
|
3695 |
+
637
|
3696 |
+
638
|
3697 |
+
639
|
3698 |
+
64
|
3699 |
+
640
|
3700 |
+
641
|
3701 |
+
642
|
3702 |
+
643
|
3703 |
+
644
|
3704 |
+
645
|
3705 |
+
646
|
3706 |
+
647
|
3707 |
+
648
|
3708 |
+
649
|
3709 |
+
65
|
3710 |
+
650
|
3711 |
+
651
|
3712 |
+
652
|
3713 |
+
653
|
3714 |
+
654
|
3715 |
+
655
|
3716 |
+
656
|
3717 |
+
657
|
3718 |
+
658
|
3719 |
+
659
|
3720 |
+
66
|
3721 |
+
660
|
3722 |
+
661
|
3723 |
+
662
|
3724 |
+
663
|
3725 |
+
664
|
3726 |
+
665
|
3727 |
+
666
|
3728 |
+
667
|
3729 |
+
668
|
3730 |
+
669
|
3731 |
+
67
|
3732 |
+
670
|
3733 |
+
671
|
3734 |
+
672
|
3735 |
+
673
|
3736 |
+
674
|
3737 |
+
675
|
3738 |
+
676
|
3739 |
+
677
|
3740 |
+
678
|
3741 |
+
679
|
3742 |
+
68
|
3743 |
+
680
|
3744 |
+
681
|
3745 |
+
682
|
3746 |
+
683
|
3747 |
+
684
|
3748 |
+
685
|
3749 |
+
686
|
3750 |
+
687
|
3751 |
+
688
|
3752 |
+
689
|
3753 |
+
69
|
3754 |
+
690
|
3755 |
+
691
|
3756 |
+
692
|
3757 |
+
693
|
3758 |
+
694
|
3759 |
+
695
|
3760 |
+
696
|
3761 |
+
697
|
3762 |
+
698
|
3763 |
+
699
|
3764 |
+
7
|
3765 |
+
70
|
3766 |
+
700
|
3767 |
+
701
|
3768 |
+
702
|
3769 |
+
703
|
3770 |
+
704
|
3771 |
+
705
|
3772 |
+
706
|
3773 |
+
707
|
3774 |
+
708
|
3775 |
+
709
|
3776 |
+
71
|
3777 |
+
710
|
3778 |
+
711
|
3779 |
+
712
|
3780 |
+
713
|
3781 |
+
714
|
3782 |
+
715
|
3783 |
+
716
|
3784 |
+
717
|
3785 |
+
718
|
3786 |
+
719
|
3787 |
+
72
|
3788 |
+
720
|
3789 |
+
721
|
3790 |
+
722
|
3791 |
+
723
|
3792 |
+
724
|
3793 |
+
725
|
3794 |
+
726
|
3795 |
+
727
|
3796 |
+
728
|
3797 |
+
729
|
3798 |
+
73
|
3799 |
+
730
|
3800 |
+
731
|
3801 |
+
732
|
3802 |
+
733
|
3803 |
+
734
|
3804 |
+
735
|
3805 |
+
736
|
3806 |
+
737
|
3807 |
+
738
|
3808 |
+
739
|
3809 |
+
74
|
3810 |
+
740
|
3811 |
+
741
|
3812 |
+
742
|
3813 |
+
743
|
3814 |
+
744
|
3815 |
+
745
|
3816 |
+
746
|
3817 |
+
747
|
3818 |
+
748
|
3819 |
+
749
|
3820 |
+
75
|
3821 |
+
750
|
3822 |
+
751
|
3823 |
+
752
|
3824 |
+
753
|
3825 |
+
754
|
3826 |
+
755
|
3827 |
+
756
|
3828 |
+
757
|
3829 |
+
758
|
3830 |
+
759
|
3831 |
+
76
|
3832 |
+
760
|
3833 |
+
761
|
3834 |
+
762
|
3835 |
+
763
|
3836 |
+
764
|
3837 |
+
765
|
3838 |
+
766
|
3839 |
+
767
|
3840 |
+
768
|
3841 |
+
769
|
3842 |
+
77
|
3843 |
+
770
|
3844 |
+
771
|
3845 |
+
772
|
3846 |
+
773
|
3847 |
+
774
|
3848 |
+
775
|
3849 |
+
776
|
3850 |
+
777
|
3851 |
+
778
|
3852 |
+
779
|
3853 |
+
78
|
3854 |
+
780
|
3855 |
+
781
|
3856 |
+
782
|
3857 |
+
783
|
3858 |
+
784
|
3859 |
+
785
|
3860 |
+
786
|
3861 |
+
787
|
3862 |
+
788
|
3863 |
+
789
|
3864 |
+
79
|
3865 |
+
790
|
3866 |
+
791
|
3867 |
+
792
|
3868 |
+
793
|
3869 |
+
794
|
3870 |
+
795
|
3871 |
+
796
|
3872 |
+
797
|
3873 |
+
798
|
3874 |
+
799
|
3875 |
+
8
|
3876 |
+
80
|
3877 |
+
800
|
3878 |
+
801
|
3879 |
+
802
|
3880 |
+
803
|
3881 |
+
804
|
3882 |
+
805
|
3883 |
+
806
|
3884 |
+
807
|
3885 |
+
808
|
3886 |
+
809
|
3887 |
+
81
|
3888 |
+
810
|
3889 |
+
811
|
3890 |
+
812
|
3891 |
+
813
|
3892 |
+
814
|
3893 |
+
815
|
3894 |
+
816
|
3895 |
+
817
|
3896 |
+
818
|
3897 |
+
819
|
3898 |
+
82
|
3899 |
+
820
|
3900 |
+
821
|
3901 |
+
822
|
3902 |
+
823
|
3903 |
+
824
|
3904 |
+
825
|
3905 |
+
826
|
3906 |
+
827
|
3907 |
+
828
|
3908 |
+
829
|
3909 |
+
83
|
3910 |
+
830
|
3911 |
+
831
|
3912 |
+
832
|
3913 |
+
833
|
3914 |
+
834
|
3915 |
+
835
|
3916 |
+
836
|
3917 |
+
837
|
3918 |
+
838
|
3919 |
+
839
|
3920 |
+
84
|
3921 |
+
840
|
3922 |
+
841
|
3923 |
+
842
|
3924 |
+
843
|
3925 |
+
844
|
3926 |
+
845
|
3927 |
+
846
|
3928 |
+
847
|
3929 |
+
848
|
3930 |
+
849
|
3931 |
+
85
|
3932 |
+
850
|
3933 |
+
851
|
3934 |
+
852
|
3935 |
+
853
|
3936 |
+
854
|
3937 |
+
855
|
3938 |
+
856
|
3939 |
+
857
|
3940 |
+
858
|
3941 |
+
859
|
3942 |
+
86
|
3943 |
+
860
|
3944 |
+
861
|
3945 |
+
862
|
3946 |
+
863
|
3947 |
+
864
|
3948 |
+
865
|
3949 |
+
866
|
3950 |
+
867
|
3951 |
+
868
|
3952 |
+
869
|
3953 |
+
87
|
3954 |
+
870
|
3955 |
+
871
|
3956 |
+
872
|
3957 |
+
873
|
3958 |
+
874
|
3959 |
+
875
|
3960 |
+
876
|
3961 |
+
877
|
3962 |
+
878
|
3963 |
+
879
|
3964 |
+
88
|
3965 |
+
880
|
3966 |
+
881
|
3967 |
+
882
|
3968 |
+
883
|
3969 |
+
884
|
3970 |
+
885
|
3971 |
+
886
|
3972 |
+
887
|
3973 |
+
888
|
3974 |
+
889
|
3975 |
+
89
|
3976 |
+
890
|
3977 |
+
891
|
3978 |
+
892
|
3979 |
+
893
|
3980 |
+
894
|
3981 |
+
895
|
3982 |
+
896
|
3983 |
+
897
|
3984 |
+
898
|
3985 |
+
899
|
3986 |
+
9
|
3987 |
+
90
|
3988 |
+
900
|
3989 |
+
901
|
3990 |
+
902
|
3991 |
+
903
|
3992 |
+
904
|
3993 |
+
905
|
3994 |
+
906
|
3995 |
+
907
|
3996 |
+
908
|
3997 |
+
909
|
3998 |
+
91
|
3999 |
+
910
|
4000 |
+
911
|
4001 |
+
912
|
4002 |
+
913
|
4003 |
+
914
|
4004 |
+
915
|
4005 |
+
916
|
4006 |
+
917
|
4007 |
+
918
|
4008 |
+
919
|
4009 |
+
92
|
4010 |
+
920
|
4011 |
+
921
|
4012 |
+
922
|
4013 |
+
923
|
4014 |
+
924
|
4015 |
+
925
|
4016 |
+
926
|
4017 |
+
927
|
4018 |
+
928
|
4019 |
+
929
|
4020 |
+
93
|
4021 |
+
930
|
4022 |
+
931
|
4023 |
+
932
|
4024 |
+
933
|
4025 |
+
934
|
4026 |
+
935
|
4027 |
+
936
|
4028 |
+
937
|
4029 |
+
938
|
4030 |
+
939
|
4031 |
+
94
|
4032 |
+
940
|
4033 |
+
941
|
4034 |
+
942
|
4035 |
+
943
|
4036 |
+
944
|
4037 |
+
945
|
4038 |
+
946
|
4039 |
+
947
|
4040 |
+
948
|
4041 |
+
949
|
4042 |
+
95
|
4043 |
+
950
|
4044 |
+
951
|
4045 |
+
952
|
4046 |
+
953
|
4047 |
+
954
|
4048 |
+
955
|
4049 |
+
956
|
4050 |
+
957
|
4051 |
+
958
|
4052 |
+
959
|
4053 |
+
96
|
4054 |
+
960
|
4055 |
+
961
|
4056 |
+
962
|
4057 |
+
963
|
4058 |
+
964
|
4059 |
+
965
|
4060 |
+
966
|
4061 |
+
967
|
4062 |
+
968
|
4063 |
+
969
|
4064 |
+
97
|
4065 |
+
970
|
4066 |
+
971
|
4067 |
+
972
|
4068 |
+
973
|
4069 |
+
974
|
4070 |
+
975
|
4071 |
+
976
|
4072 |
+
977
|
4073 |
+
978
|
4074 |
+
979
|
4075 |
+
98
|
4076 |
+
980
|
4077 |
+
981
|
4078 |
+
982
|
4079 |
+
983
|
4080 |
+
984
|
4081 |
+
985
|
4082 |
+
986
|
4083 |
+
987
|
4084 |
+
988
|
4085 |
+
989
|
4086 |
+
99
|
4087 |
+
990
|
4088 |
+
991
|
4089 |
+
992
|
4090 |
+
993
|
4091 |
+
994
|
4092 |
+
995
|
4093 |
+
996
|
4094 |
+
997
|
4095 |
+
998
|
4096 |
+
999
|
semantic_detokenizer/f5tts_npu_patch.py
ADDED
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright (C) 2025. Huawei Technologies Co., Ltd. All Rights Reserved. (authors: Xiao Chen)
|
2 |
+
|
3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
4 |
+
# you may not use this file except in compliance with the License.
|
5 |
+
# You may obtain a copy of the License at
|
6 |
+
|
7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
8 |
+
|
9 |
+
# Unless required by applicable law or agreed to in writing, software
|
10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
+
# See the License for the specific language governing permissions and
|
13 |
+
# limitations under the License.
|
14 |
+
|
15 |
+
|
16 |
+
from patch_utils import MindSpeedPatchesManager as aspm
|
17 |
+
import torchaudio
|
18 |
+
import torch
|
19 |
+
import logging
|
20 |
+
|
21 |
+
def get_vocos_mel_spectrogram_cpu(
    waveform,
    n_fft=1024,
    n_mel_channels=100,
    target_sample_rate=24000,
    hop_length=256,
    win_length=1024,
):
    """Compute a log-mel spectrogram on the CPU, returning it on the input's device.

    NPU workaround: the mel/STFT transform is run on CPU (the kernels are not
    reliably supported on the NPU backend); only the resulting mel tensor is
    moved back to the caller's device.

    Args:
        waveform: tensor of shape (b, nw) or (b, 1, nw).

    Returns:
        Log-mel spectrogram on the same device as the input waveform.

    Fixes vs. original: dropped the redundant `waveform = waveform.to(wave_device)`
    (the local copy is discarded anyway) and the misleading `.to(waveform.device)`
    on the transform, which was a no-op after `.cpu()`.
    """
    wave_device = waveform.device
    waveform = waveform.cpu()
    # Built on CPU so the whole transform executes there.
    mel_stft = torchaudio.transforms.MelSpectrogram(
        sample_rate=target_sample_rate,
        n_fft=n_fft,
        win_length=win_length,
        hop_length=hop_length,
        n_mels=n_mel_channels,
        power=1,
        center=True,
        normalized=False,
        norm=None,
    )
    if len(waveform.shape) == 3:
        waveform = waveform.squeeze(1)  # 'b 1 nw -> b nw'

    assert len(waveform.shape) == 2

    mel = mel_stft(waveform)
    mel = mel.clamp(min=1e-5).log()  # clamp avoids log(0)
    return mel.to(wave_device)
|
52 |
+
|
53 |
+
|
54 |
+
def load_checkpoint_npu(model, ckpt_path, device: str, dtype=None, use_ema=True):
    """Load model weights from `ckpt_path` and move the model to `device`.

    NPU-safe replacement for ``f5_tts.infer.utils_infer.load_checkpoint``.

    Fix: the original dtype expression relied on ``or``/``and`` precedence,
    so for an "npu" device it still evaluated
    ``torch.cuda.get_device_properties(device)``, which raises on NPU
    backends. The branches are now explicit: CUDA and NPU use float16 (CUDA
    unconditionally — matching the original's short-circuit behavior),
    everything else float32.

    Args:
        model: module whose state dict is to be populated.
        ckpt_path: path to a ``.pt`` or ``.safetensors`` checkpoint.
        device: target device string, e.g. "cpu", "cuda:0", "npu:0".
        dtype: explicit parameter dtype; inferred from `device` when None.
        use_ema: load EMA weights (keys prefixed "ema_model.") when True.

    Returns:
        The model with weights loaded, on `device`.
    """
    logging.info(f"Load checkpoint {ckpt_path}")
    if dtype is None:
        # half precision on accelerators, full precision elsewhere
        if "npu" in device or "cuda" in device:
            dtype = torch.float16
        else:
            dtype = torch.float32
    model = model.to(dtype)

    ckpt_type = ckpt_path.split(".")[-1]
    if ckpt_type == "safetensors":
        from safetensors.torch import load_file

        checkpoint = load_file(ckpt_path, device=device)
    else:
        checkpoint = torch.load(ckpt_path, map_location=device, weights_only=True)

    if use_ema:
        if ckpt_type == "safetensors":
            checkpoint = {"ema_model_state_dict": checkpoint}
        # Strip the "ema_model." prefix; drop EMA bookkeeping entries.
        checkpoint["model_state_dict"] = {
            k.replace("ema_model.", ""): v
            for k, v in checkpoint["ema_model_state_dict"].items()
            if k not in ["initted", "step"]
        }

        # patch for backward compatibility, 305e3ea
        for key in [
            "mel_spec.mel_stft.mel_scale.fb",
            "mel_spec.mel_stft.spectrogram.window",
        ]:
            if key in checkpoint["model_state_dict"]:
                del checkpoint["model_state_dict"][key]

        model.load_state_dict(checkpoint["model_state_dict"])
    else:
        if ckpt_type == "safetensors":
            checkpoint = {"model_state_dict": checkpoint}
        model.load_state_dict(checkpoint["model_state_dict"])

    del checkpoint
    torch.cuda.empty_cache()  # no-op when CUDA is uninitialized

    return model.to(device)
|
101 |
+
|
102 |
+
def patch_for_npu():
    """Install NPU-friendly replacements into the f5_tts package.

    Reroutes checkpoint loading and mel-spectrogram extraction through the
    MindSpeed patch manager so f5_tts can run on Ascend NPU devices.
    """
    replacements = {
        'f5_tts.infer.utils_infer.load_checkpoint': load_checkpoint_npu,
        'f5_tts.model.modules.get_vocos_mel_spectrogram': get_vocos_mel_spectrogram_cpu,
    }
    for target_name, replacement in replacements.items():
        aspm.register_patch(target_name, replacement)
    aspm.apply_patches()
|
semantic_detokenizer/model/__init__.py
ADDED
File without changes
|
semantic_detokenizer/model/cadit.py
ADDED
@@ -0,0 +1,263 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright (C) 2025. Huawei Technologies Co., Ltd. All Rights Reserved. (authors: Dehua Tao)
|
2 |
+
|
3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
4 |
+
# you may not use this file except in compliance with the License.
|
5 |
+
# You may obtain a copy of the License at
|
6 |
+
|
7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
8 |
+
|
9 |
+
# Unless required by applicable law or agreed to in writing, software
|
10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
+
# See the License for the specific language governing permissions and
|
13 |
+
# limitations under the License.
|
14 |
+
|
15 |
+
|
16 |
+
"""
|
17 |
+
ein notation:
|
18 |
+
b - batch
|
19 |
+
n - sequence
|
20 |
+
nt - text sequence
|
21 |
+
nw - raw wave length
|
22 |
+
d - dimension
|
23 |
+
"""
|
24 |
+
|
25 |
+
from __future__ import annotations
|
26 |
+
|
27 |
+
import torch
|
28 |
+
from torch import nn
|
29 |
+
import torch.nn.functional as F
|
30 |
+
from torchaudio.models import Conformer
|
31 |
+
|
32 |
+
from x_transformers.x_transformers import RotaryEmbedding
|
33 |
+
|
34 |
+
from f5_tts.model.modules import (
|
35 |
+
TimestepEmbedding,
|
36 |
+
ConvNeXtV2Block,
|
37 |
+
ConvPositionEmbedding,
|
38 |
+
AdaLayerNormZero_Final,
|
39 |
+
precompute_freqs_cis,
|
40 |
+
get_pos_embed_indices,
|
41 |
+
)
|
42 |
+
|
43 |
+
from model.modules import CADiTBlock
|
44 |
+
|
45 |
+
import logging
|
46 |
+
|
47 |
+
# Text embedding
|
48 |
+
class TextEmbedding(nn.Module):
    """Embed semantic-token ids, optionally adding positions + ConvNeXtV2 refinement.

    The embedding table has ``text_num_embeds + 1`` rows because index 0 is
    reserved as the filler/padding token (inputs are shifted by +1 in forward).
    """

    def __init__(
        self,
        text_num_embeds,
        text_dim,
        should_extend_text=True,  # pad/trim text to the mel sequence length
        conv_layers=0,  # number of ConvNeXtV2 blocks; 0 disables extra modeling
        conv_mult=2,  # hidden-width multiplier inside each ConvNeXtV2 block
    ):
        super().__init__()
        self.text_embed = nn.Embedding(
            text_num_embeds + 1, text_dim
        )  # use 0 as filler token

        self.should_extend_text = should_extend_text
        logging.info(f"should_extend_text={should_extend_text}")

        if conv_layers > 0:
            self.extra_modeling = True
            self.precompute_max_pos = 4096  # ~44s of 24khz audio
            # Precomputed sinusoidal position table; excluded from checkpoints.
            self.register_buffer(
                "freqs_cis",
                precompute_freqs_cis(text_dim, self.precompute_max_pos),
                persistent=False,
            )
            self.text_blocks = nn.Sequential(
                *[
                    ConvNeXtV2Block(text_dim, text_dim * conv_mult)
                    for _ in range(conv_layers)
                ]
            )
            # # Can be deleted
            # self.text_blocks = Conformer(
            #     input_dim=text_dim,
            #     num_heads=8,  # Not sure it is good
            #     ffn_dim=text_dim * conv_mult,
            #     num_layers=conv_layers,
            #     depthwise_conv_kernel_size=7,  # See ConvNeXtV2Block
            # )
        else:
            self.extra_modeling = False

    def forward(self, text: int["b nt"], seq_len, drop_text=False):  # noqa: F722
        """Return text embeddings of shape (b, n, text_dim).

        Args:
            text: batch of token ids, batch-padded with -1.
            seq_len: target (mel) sequence length.
            drop_text: zero out the ids (classifier-free guidance branch).
        """
        text = (
            text + 1
        )  # use 0 as filler token. preprocess of batch pad -1, see list_str_to_idx()
        text = text[
            :, :seq_len
        ]  # curtail if character tokens are more than the mel spec tokens
        batch, text_len = text.shape[0], text.shape[1]

        if self.should_extend_text:
            # Right-pad with the filler token up to the mel length.
            text = F.pad(text, (0, seq_len - text_len), value=0)
        else:
            # Keep the native text length; position indices below use it.
            seq_len = text_len

        if drop_text:  # cfg for text
            text = torch.zeros_like(text)

        text = self.text_embed(text)  # b n -> b n d

        # possible extra modeling
        if self.extra_modeling:
            # sinus pos emb
            batch_start = torch.zeros((batch,), dtype=torch.long)
            pos_idx = get_pos_embed_indices(
                batch_start, seq_len, max_pos=self.precompute_max_pos
            )
            text_pos_embed = self.freqs_cis[pos_idx]
            text = text + text_pos_embed

            # convnextv2 blocks
            text = self.text_blocks(text)

            # # Can be deleted
            # # conformer blocks
            # lengths = torch.Tensor([text.size(1)] * text.size(0)).to(text.device)
            # text, _ = self.text_blocks(text, lengths)

        return text
|
128 |
+
|
129 |
+
|
130 |
+
# noised input audio embedding
|
131 |
+
|
132 |
+
|
133 |
+
class InputAudioEmbedding(nn.Module):
    """Project concatenated (noisy, conditioning) mel frames into the model dim.

    The noisy input and the masked conditioning audio are stacked along the
    feature axis, linearly projected, and augmented with a convolutional
    positional embedding in residual form.
    """

    def __init__(self, mel_dim, out_dim):
        super().__init__()
        self.proj = nn.Linear(mel_dim * 2, out_dim)
        self.conv_pos_embed = ConvPositionEmbedding(dim=out_dim)

    def forward(
        self,
        x: float["b n d"],
        cond: float["b n d"],
        drop_audio_cond=False,
    ):  # noqa: F722
        # Classifier-free guidance branch: drop the audio condition entirely.
        if drop_audio_cond:
            cond = torch.zeros_like(cond)

        stacked = torch.cat((x, cond), dim=-1)
        projected = self.proj(stacked)
        # Residual convolutional position embedding.
        return projected + self.conv_pos_embed(projected)
|
151 |
+
|
152 |
+
|
153 |
+
# Transformer backbone using cross-attention DiT blocks
|
154 |
+
|
155 |
+
|
156 |
+
class CADiT(nn.Module):
    """Cross-attention DiT backbone for flow-matching mel generation.

    Differences from the stock F5-TTS DiT (per the inline "Modification"
    notes): the input embedding concatenates only the noisy and masked
    conditioning mels, and text conditioning enters each block via
    cross-attention instead of being concatenated into the sequence.
    """

    def __init__(
        self,
        *,
        dim,
        depth=8,
        heads=8,
        dim_head=64,
        dropout=0.1,
        ff_mult=4,
        mel_dim=100,
        text_num_embeds=256,
        text_dim=None,  # defaults to mel_dim when None
        should_extend_text=True,
        conv_layers=0,
        long_skip_connection=False,
        checkpoint_activations=False,  # trade compute for memory if True
    ):
        super().__init__()

        self.time_embed = TimestepEmbedding(dim)
        if text_dim is None:
            text_dim = mel_dim
        self.text_embed = TextEmbedding(
            text_num_embeds,
            text_dim,
            should_extend_text=should_extend_text,
            conv_layers=conv_layers,
        )

        # Modification: only concatenate noisy and masked speech
        self.input_embed = InputAudioEmbedding(mel_dim, dim)

        self.rotary_embed = RotaryEmbedding(dim_head)

        self.dim = dim
        self.depth = depth

        # Modification: use cross-attention DiT block
        self.transformer_blocks = nn.ModuleList(
            [
                CADiTBlock(
                    dim=dim,
                    text_dim=text_dim,
                    heads=heads,
                    dim_head=dim_head,
                    ff_mult=ff_mult,
                    dropout=dropout,
                )
                for _ in range(depth)
            ]
        )
        # Optional UNet-style skip from the stack input to its output.
        self.long_skip_connection = (
            nn.Linear(dim * 2, dim, bias=False) if long_skip_connection else None
        )

        self.norm_out = AdaLayerNormZero_Final(dim)  # final modulation
        self.proj_out = nn.Linear(dim, mel_dim)

        self.checkpoint_activations = checkpoint_activations

    def ckpt_wrapper(self, module):
        """Adapt `module` to positional-args-only calls for torch.utils.checkpoint."""
        # https://github.com/chuanyangjin/fast-DiT/blob/main/models.py
        def ckpt_forward(*inputs):
            outputs = module(*inputs)
            return outputs

        return ckpt_forward

    def forward(
        self,
        x: float["b n d"],  # noised input audio # noqa: F722
        cond: float["b n d"],  # masked cond audio # noqa: F722
        text: int["b nt"],  # text # noqa: F722
        time: float["b"] | float[""],  # time step # noqa: F821 F722
        drop_audio_cond,  # cfg for cond audio
        drop_text,  # cfg for text
        mask: bool["b n"] | None = None,  # noqa: F722
    ):
        """Predict the flow for one ODE step; returns a (b, n, mel_dim) tensor."""
        batch, seq_len = x.shape[0], x.shape[1]
        # Broadcast a scalar timestep across the batch.
        if time.ndim == 0:
            time = time.repeat(batch)

        # t: conditioning time, x: noised input audio
        t = self.time_embed(time)
        text_embed = self.text_embed(text, seq_len, drop_text=drop_text)
        x = self.input_embed(x, cond, drop_audio_cond=drop_audio_cond)

        rope = self.rotary_embed.forward_from_seq_len(seq_len)

        if self.long_skip_connection is not None:
            residual = x

        for block in self.transformer_blocks:
            if self.checkpoint_activations:
                x = torch.utils.checkpoint.checkpoint(
                    self.ckpt_wrapper(block), x, text_embed, t, mask, rope
                )
            else:
                x = block(x, text_embed, t, mask=mask, rope=rope)

        if self.long_skip_connection is not None:
            x = self.long_skip_connection(torch.cat((x, residual), dim=-1))

        x = self.norm_out(x, t)
        output = self.proj_out(x)

        return output
|
semantic_detokenizer/model/modules.py
ADDED
@@ -0,0 +1,223 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright (C) 2025. Huawei Technologies Co., Ltd. All Rights Reserved. (authors: Dehua Tao)
|
2 |
+
|
3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
4 |
+
# you may not use this file except in compliance with the License.
|
5 |
+
# You may obtain a copy of the License at
|
6 |
+
|
7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
8 |
+
|
9 |
+
# Unless required by applicable law or agreed to in writing, software
|
10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
+
# See the License for the specific language governing permissions and
|
13 |
+
# limitations under the License.
|
14 |
+
|
15 |
+
|
16 |
+
"""
|
17 |
+
ein notation:
|
18 |
+
b - batch
|
19 |
+
n - sequence
|
20 |
+
nt - text sequence
|
21 |
+
nw - raw wave length
|
22 |
+
d - dimension
|
23 |
+
"""
|
24 |
+
|
25 |
+
from __future__ import annotations
|
26 |
+
|
27 |
+
# import math
|
28 |
+
# from typing import Optional
|
29 |
+
|
30 |
+
import torch
|
31 |
+
import torch.nn.functional as F
|
32 |
+
# import torchaudio
|
33 |
+
from librosa.filters import mel as librosa_mel_fn
|
34 |
+
from torch import nn
|
35 |
+
from x_transformers.x_transformers import apply_rotary_pos_emb
|
36 |
+
|
37 |
+
mel_basis_cache = {}
|
38 |
+
hann_window_cache = {}
|
39 |
+
|
40 |
+
from f5_tts.model.modules import AdaLayerNormZero, Attention, AttnProcessor, FeedForward
|
41 |
+
|
42 |
+
|
43 |
+
# Cross-attention with audio as query and text as key/value
|
44 |
+
|
45 |
+
|
46 |
+
class CrossAttention(nn.Module):
    """Cross-attention wrapper: audio frames query text key/value features.

    This module only owns the projection layers; the actual attention math is
    delegated to the pluggable `processor` callable.
    """

    def __init__(
        self,
        processor: CrossAttnProcessor,
        dim: int,
        dim_to_k: int,
        heads: int = 8,
        dim_head: int = 64,
        dropout: float = 0.0,
    ):
        super().__init__()

        # F.scaled_dot_product_attention only exists from PyTorch 2.0 onwards.
        if not hasattr(F, "scaled_dot_product_attention"):
            raise ImportError(
                "Attention equires PyTorch 2.0, to use it, please upgrade PyTorch to 2.0."
            )

        self.processor = processor

        self.dim = dim
        self.heads = heads
        self.inner_dim = dim_head * heads
        self.dropout = dropout

        # Queries come from the audio stream (dim); keys/values from the text
        # stream (dim_to_k) — hence the asymmetric input dimensions.
        self.to_q = nn.Linear(dim, self.inner_dim)
        self.to_k = nn.Linear(dim_to_k, self.inner_dim)
        self.to_v = nn.Linear(dim_to_k, self.inner_dim)

        # Output projection followed by dropout, addressable by index
        # (the processor calls to_out[0] then to_out[1]).
        self.to_out = nn.ModuleList(
            [nn.Linear(self.inner_dim, dim), nn.Dropout(dropout)]
        )

    def forward(
        self,
        x_for_q: float["b n d"],  # (noisy + masked) audio input # noqa: F722
        x_for_k: float["b n d"] = None,  # text input # noqa: F722
        mask: bool["b n"] | None = None,  # noqa: F722
        rope=None,  # rotary position embedding for x
    ) -> torch.Tensor:
        # Everything is delegated to the configured processor.
        return self.processor(self, x_for_q, x_for_k, mask=mask, rope=rope)
|
92 |
+
|
93 |
+
|
94 |
+
# Cross-attention processor
|
95 |
+
|
96 |
+
|
97 |
+
class CrossAttnProcessor:
    """Default scaled-dot-product attention implementation for CrossAttention."""

    def __init__(self):
        pass

    def __call__(
        self,
        attn: CrossAttention,
        x_for_q: float["b n d"],  # (noisy + masked) audio input # noqa: F722
        x_for_k: float["b n d"],  # text input # noqa: F722
        mask: bool["b n"] | None = None,  # noqa: F722
        rope=None,  # rotary position embedding
    ) -> torch.FloatTensor:
        bsz = x_for_q.shape[0]

        # Project: audio -> queries, text -> keys/values.
        q = attn.to_q(x_for_q)
        k = attn.to_k(x_for_k)
        v = attn.to_v(x_for_k)

        # Rotate q/k when a rotary embedding is supplied.
        if rope is not None:
            freqs, xpos_scale = rope
            if xpos_scale is not None:
                q_scale, k_scale = xpos_scale, xpos_scale**-1.0
            else:
                q_scale, k_scale = 1.0, 1.0
            q = apply_rotary_pos_emb(q, freqs, q_scale)
            k = apply_rotary_pos_emb(k, freqs, k_scale)

        # Split heads: b n (h d) -> b h n d.
        head_dim = k.shape[-1] // attn.heads
        q = q.view(bsz, -1, attn.heads, head_dim).transpose(1, 2)
        k = k.view(bsz, -1, attn.heads, head_dim).transpose(1, 2)
        v = v.view(bsz, -1, attn.heads, head_dim).transpose(1, 2)

        # Padding mask over the key positions, broadcast to every head/query.
        # e.g. inference got a batch with different target durations.
        attn_mask = None
        if mask is not None:
            attn_mask = mask.unsqueeze(1).unsqueeze(1)  # 'b n -> b 1 1 n'
            attn_mask = attn_mask.expand(
                bsz, attn.heads, q.shape[-2], k.shape[-2]
            )

        out = F.scaled_dot_product_attention(
            q, k, v, attn_mask=attn_mask, dropout_p=0.0, is_causal=False
        )
        out = out.transpose(1, 2).reshape(bsz, -1, attn.heads * head_dim)
        out = out.to(q.dtype)

        # Output projection, then dropout.
        out = attn.to_out[0](out)
        out = attn.to_out[1](out)

        # Zero out padded query positions.
        if mask is not None:
            out = out.masked_fill(~mask.unsqueeze(-1), 0.0)

        return out
|
159 |
+
|
160 |
+
|
161 |
+
# Cross-attention DiT Block
|
162 |
+
|
163 |
+
|
164 |
+
class CADiTBlock(nn.Module):
    """DiT block with an extra cross-attention step (audio attends to text).

    Per forward pass: AdaLN-Zero-modulated self-attention, then an ungated
    LayerNorm cross-attention into the text features, then an
    AdaLN-modulated feed-forward — each with a residual connection.
    """

    def __init__(self, dim, text_dim, heads, dim_head, ff_mult=4, dropout=0.1):
        super().__init__()

        # Self-attention over the audio stream, modulated by the time embedding.
        self.attn_norm = AdaLayerNormZero(dim)
        self.attn = Attention(
            processor=AttnProcessor(),
            dim=dim,
            heads=heads,
            dim_head=dim_head,
            dropout=dropout,
        )

        # Cross-attention: audio queries, text keys/values.
        self.cross_attn_norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)
        self.cross_attn = CrossAttention(
            processor=CrossAttnProcessor(),
            dim=dim,
            dim_to_k=text_dim,
            heads=heads,
            dim_head=dim_head,
            dropout=dropout,
        )

        self.ff_norm = nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6)
        self.ff = FeedForward(
            dim=dim, mult=ff_mult, dropout=dropout, approximate="tanh"
        )

    def forward(
        self,
        x,  # audio hidden states
        y,  # text embedding
        t,  # time embedding
        mask=None,
        rope=None,
    ):
        # Self-attention with AdaLN-Zero modulation; the same call also yields
        # the shift/scale/gate parameters reused by the feed-forward below.
        modulated, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.attn_norm(x, emb=t)
        x = x + gate_msa.unsqueeze(1) * self.attn(x=modulated, mask=mask, rope=rope)

        # Ungated residual cross-attention into the text features.
        x = x + self.cross_attn(self.cross_attn_norm(x), y, mask=mask, rope=rope)

        # Feed-forward with the modulation parameters produced above.
        ff_in = self.ff_norm(x) * (1 + scale_mlp[:, None]) + shift_mlp[:, None]
        x = x + gate_mlp.unsqueeze(1) * self.ff(ff_in)

        return x
|
222 |
+
|
223 |
+
|
semantic_detokenizer/patch_utils.py
ADDED
@@ -0,0 +1,122 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# copied from https://gitee.com/ascend/MindSpeed/blob/master/mindspeed/patch_utils.py
|
2 |
+
|
3 |
+
import importlib
|
4 |
+
import sys
|
5 |
+
import types
|
6 |
+
|
7 |
+
|
8 |
+
def get_func_name(func):
|
9 |
+
if isinstance(func, str):
|
10 |
+
return func
|
11 |
+
return '.'.join((func.__module__, func.__qualname__))
|
12 |
+
|
13 |
+
|
14 |
+
def dummy_function_wrapper(func_name):
    """Return a stand-in callable that raises when invoked.

    Used when a patch target does not exist: the placeholder raises a
    RuntimeError at call time instead of failing silently at registration.
    """

    def dummy_function(*args, **kwargs):
        raise RuntimeError(f'function {func_name} no exist')

    return dummy_function
|
19 |
+
|
20 |
+
|
21 |
+
class Patch:
    """A single monkey patch: replace `orig_func_name` with a new callable.

    Copied from MindSpeed. Supports plain replacement functions as well as
    wrappers/decorators (callables whose __name__ ends with "wrapper" or
    "decorator"), which are layered on top of the original (or replacement)
    function when the patch is applied.
    """

    def __init__(self, orig_func_name, new_func, create_dummy):
        # Split "pkg.mod.func" into module path and attribute name; a name
        # with no dot is treated as a bare module (no attribute to patch).
        split_name = orig_func_name.rsplit('.', 1)
        if len(split_name) == 1:
            self.orig_module_name, self.orig_func_name = orig_func_name, None
        else:
            self.orig_module_name, self.orig_func_name = split_name
        self.orig_module = None  # resolved lazily in apply_patch()
        self.orig_func = None  # resolved lazily in apply_patch()

        self.patch_func = None
        self.wrappers = []  # wrapper/decorator callables, applied in order
        if new_func is None:
            # No replacement given: installing a dummy makes accidental calls fail loudly.
            new_func = dummy_function_wrapper(orig_func_name)
        self.set_patch_func(new_func)
        self.is_applied = False
        self.create_dummy = create_dummy  # fabricate missing modules when True

    @property
    def orig_func_id(self):
        # Identity of the original function, used to find aliased references.
        return id(self.orig_func)

    @property
    def patch_func_id(self):
        return id(self.patch_func)

    def set_patch_func(self, new_func, force_patch=False):
        """Record `new_func` as either a wrapper layer or the replacement itself."""
        if hasattr(new_func, '__name__') and new_func.__name__.endswith(('wrapper', 'decorator')):
            self.wrappers.append(new_func)
        else:
            # Only one direct replacement is allowed unless forced.
            if self.patch_func and not force_patch:
                raise RuntimeError('the patch of {} exist !'.format(self.orig_func_name))
            self.patch_func = new_func
        self.is_applied = False

    def apply_patch(self):
        """Resolve the target and install the (wrapped) replacement everywhere."""
        if self.is_applied:
            return

        self.orig_module, self.orig_func = Patch.parse_path(self.orig_module_name, self.orig_func_name, self.create_dummy)

        # Start from the replacement if one was given, else wrap the original.
        final_patch_func = self.orig_func
        if self.patch_func is not None:
            final_patch_func = self.patch_func

        for wrapper in self.wrappers:
            final_patch_func = wrapper(final_patch_func)

        if self.orig_func_name is not None:
            setattr(self.orig_module, self.orig_func_name, final_patch_func)
        # Also rebind any module that imported the original function directly
        # (`from mod import func`), identified by object identity.
        for key, value in sys.modules.copy().items():
            if self.orig_func_name is not None and hasattr(value, self.orig_func_name) \
                    and id(getattr(value, self.orig_func_name)) == self.orig_func_id:
                setattr(value, self.orig_func_name, final_patch_func)
        self.is_applied = True

    @staticmethod
    def parse_path(module_path, function_name, create_dummy):
        """Import `module_path` and return (module, attribute).

        Walks the dotted path one segment at a time; when `create_dummy` is
        True, missing modules/attributes are replaced with placeholder
        modules or dummy functions instead of raising.
        """
        from importlib.machinery import ModuleSpec
        modules = module_path.split('.')
        for i in range(1, len(modules) + 1):
            parent = '.'.join(modules[:i - 1])
            path = '.'.join(modules[:i])
            try:
                importlib.import_module(path)
            except ModuleNotFoundError as e:
                if not parent or not hasattr(importlib.import_module(parent), modules[i - 1]):
                    if not create_dummy:
                        raise ModuleNotFoundError(e) from e
                    # Fabricate an empty module so the rest of the path resolves.
                    sys.modules[path] = types.ModuleType(path)
                    sys.modules[path].__file__ = 'mindspeed.dummy_module.py'
                    sys.modules[path].__spec__ = ModuleSpec(path, None)
                    if parent:
                        setattr(importlib.import_module(parent), modules[i - 1], sys.modules[path])
                else:
                    # The segment exists as an attribute (e.g. a class), not a module.
                    module = getattr(importlib.import_module(parent), modules[i - 1])
                    if hasattr(module, function_name):
                        return module, getattr(module, function_name)
                    elif create_dummy:
                        return module, dummy_function_wrapper(function_name)
                    else:
                        raise RuntimeError('no exist {} of {}'.format(function_name, module))

        # Ensure the attribute exists (as None) so the caller can setattr later.
        if function_name is not None and not hasattr(sys.modules[module_path], function_name):
            setattr(sys.modules[module_path], function_name, None)
        return sys.modules[module_path], getattr(sys.modules[module_path], function_name) if function_name is not None else None
|
107 |
+
|
108 |
+
|
109 |
+
class MindSpeedPatchesManager:
    """Registry of monkey patches: collect first, then apply in one pass."""

    # Maps the fully-qualified target name to its Patch record.
    patches_info = {}

    @staticmethod
    def register_patch(orig_func_name, new_func=None, force_patch=False, create_dummy=False):
        """Register a new patch for `orig_func_name`, or extend an existing one."""
        registry = MindSpeedPatchesManager.patches_info
        existing = registry.get(orig_func_name)
        if existing is None:
            registry[orig_func_name] = Patch(orig_func_name, new_func, create_dummy)
        else:
            existing.set_patch_func(new_func, force_patch)

    @staticmethod
    def apply_patches():
        """Apply every registered patch (idempotent per patch)."""
        for patch in MindSpeedPatchesManager.patches_info.values():
            patch.apply_patch()
|
semantic_detokenizer/requirements.txt
ADDED
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Cython
|
2 |
+
numpy==1.23.4
|
3 |
+
g2p_en
|
4 |
+
jieba_fast
|
5 |
+
jieba
|
6 |
+
LangSegment>=0.2.0
|
7 |
+
wordsegment
|
8 |
+
pypinyin
|
9 |
+
cn2an
|
10 |
+
g2pM
|
11 |
+
#torch==2.1.0
|
12 |
+
WeTextProcessing
|
13 |
+
f5-tts==0.3.4
|
semantic_detokenizer/utils_infer.py
ADDED
@@ -0,0 +1,298 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright (C) 2025. Huawei Technologies Co., Ltd. All Rights Reserved. (authors: Dehua Tao,
|
2 |
+
# Xiao Chen)
|
3 |
+
|
4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
5 |
+
# you may not use this file except in compliance with the License.
|
6 |
+
# You may obtain a copy of the License at
|
7 |
+
|
8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
9 |
+
|
10 |
+
# Unless required by applicable law or agreed to in writing, software
|
11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
13 |
+
# See the License for the specific language governing permissions and
|
14 |
+
# limitations under the License.
|
15 |
+
|
16 |
+
|
17 |
+
import os
import sys

# Fix: the variable was misspelled "PYTOCH_ENABLE_MPS_FALLBACK", which PyTorch
# never reads — so the MPS CPU-fallback was never actually enabled.
os.environ["PYTORCH_ENABLE_MPS_FALLBACK"] = "1"  # for MPS device compatibility
|
21 |
+
from importlib.resources import files
|
22 |
+
import matplotlib
|
23 |
+
|
24 |
+
matplotlib.use("Agg")
|
25 |
+
|
26 |
+
|
27 |
+
import numpy as np
|
28 |
+
import torch
|
29 |
+
import torchaudio
|
30 |
+
import tqdm
|
31 |
+
import logging
|
32 |
+
# torch.set_printoptions(profile="full")
|
33 |
+
# from f5_tts.model import CFM
|
34 |
+
from f5_tts.model.utils import (
|
35 |
+
get_tokenizer,
|
36 |
+
convert_char_to_pinyin,
|
37 |
+
)
|
38 |
+
from f5_tts.model.modules import MelSpec
|
39 |
+
|
40 |
+
|
41 |
+
# Pick the best available accelerator; falls back CUDA -> XPU -> MPS -> CPU.
device = (
    "cuda"
    if torch.cuda.is_available()
    else (
        "xpu"
        if torch.xpu.is_available()
        else "mps" if torch.backends.mps.is_available() else "cpu"
    )
)

# -----------------------------------------
# Default inference hyperparameters (vocos mel front-end @ 24 kHz).

target_sample_rate = 24000  # Hz
n_mel_channels = 100  # mel filterbank size
hop_length = 256  # STFT hop in samples (~10.7 ms frames at 24 kHz)
win_length = 1024  # STFT window length
n_fft = 1024  # FFT size
mel_spec_type = "vocos"  # mel convention / vocoder family
target_rms = 0.1  # loudness-normalization target for reference audio
cross_fade_duration = 0.15  # seconds of overlap when stitching generated chunks
ode_method = "euler"  # ODE solver used for flow-matching sampling
nfe_step = 32  # 16, 32
cfg_strength = 2.0  # classifier-free guidance scale
sway_sampling_coef = -1.0  # sway-sampling coefficient (F5-TTS sampling trick)
speed = 1.0  # speaking-rate multiplier
fix_duration = None  # fixed total duration in seconds, or None for automatic
seed = 3214  # RNG seed for reproducible sampling
|
68 |
+
|
69 |
+
# -----------------------------------------
|
70 |
+
def chunk_infer_batch_process(
    ref_audio,
    ref_text,
    gen_text_batches,
    model_obj,
    vocoder,
    mel_spec_type="vocos",
    progress=tqdm,
    target_rms=0.1,
    cross_fade_duration=0.15,
    nfe_step=32,
    cfg_strength=2.0,
    sway_sampling_coef=-1.0,
    speed=1.0,
    fix_duration=None,
    device=None,
    chunk_cond_proportion=0.5,
    chunk_look_ahead=0,
    max_ref_duration=4.5,
    ref_head_cut=False,
):
    """Chunk-by-chunk TTS inference with a rolling mel/text condition.

    Each entry of ``gen_text_batches`` is synthesized conditioned on the fixed
    reference (audio + transcript) plus the tail of the previously generated
    chunk, then all chunk waveforms are joined with a Hamming-window
    cross-fade.

    Args:
        ref_audio: tuple ``(waveform, sample_rate)``; waveform has shape
            ``(channels, samples)`` — multi-channel input is mixed to mono.
        ref_text: reference transcript as a list (asserted below).
        gen_text_batches: iterable of chunk texts, each itself a list.
        model_obj: model exposing ``sample(cond=..., text=..., duration=...)``.
        vocoder: mel-to-waveform model; "vocos" uses ``.decode(mel)``,
            "bigvgan" is called directly.
        mel_spec_type: "vocos" or "bigvgan"; anything else raises ValueError.
        progress: tqdm-like module used to wrap the chunk loop.
        target_rms: references quieter than this RMS are scaled up to it
            (the generated audio is scaled back down afterwards).
        cross_fade_duration: seconds of overlap between consecutive chunks;
            values <= 0 mean plain concatenation.
        nfe_step / cfg_strength / sway_sampling_coef: sampling hyper-parameters
            forwarded to ``model_obj.sample``.
        speed: >1 shortens the estimated duration of each chunk.
        fix_duration: fixed total duration in seconds, or None to estimate
            from the reference audio/text length ratio.
        device: torch device the reference audio is moved to.
        chunk_cond_proportion: fraction of the previous chunk's mel (and text)
            kept as the rolling condition for the next chunk.
        chunk_look_ahead: number of trailing text tokens generated but trimmed
            from every non-final chunk (look-ahead context only).
        max_ref_duration: references longer than this (seconds) are truncated.
        ref_head_cut: keep the head (True) or the tail (False) of a too-long
            reference.

    Returns:
        ``(final_wave, target_sample_rate, combined_spectrogram)`` where
        ``final_wave`` is a 1-D numpy array and ``combined_spectrogram`` is
        the concatenated mel spectrogram ``(n_mel_channels, frames)``.

    Raises:
        ValueError: if ``mel_spec_type`` is neither "vocos" nor "bigvgan".
    """
    audio, sr = ref_audio
    # Mix multi-channel reference down to mono.
    if audio.shape[0] > 1:
        audio = torch.mean(audio, dim=0, keepdim=True)

    # Normalize quiet references up to target_rms (undone on the output below).
    rms = torch.sqrt(torch.mean(torch.square(audio)))
    if rms < target_rms:
        audio = audio * target_rms / rms
    if sr != target_sample_rate:
        resampler = torchaudio.transforms.Resample(sr, target_sample_rate)
        audio = resampler(audio)

    logging.info(
        "audio shape:" + str(audio.shape) + "; ref_text shape:" + str(len(ref_text))
    )
    # Truncate over-long references, trimming the transcript proportionally.
    ref_duration = audio.shape[1] / target_sample_rate
    if ref_duration > max_ref_duration:
        reserved_ref_audio_len = round(max_ref_duration * target_sample_rate)
        if ref_head_cut:
            logging.info(f"Using the first {max_ref_duration} seconds as ref audio")
            audio = audio[:, :reserved_ref_audio_len]
            ref_text = ref_text[
                : round(max_ref_duration * len(ref_text) / ref_duration)
            ]
        else:
            logging.info(f"Using the last {max_ref_duration} seconds as ref audio")
            audio = audio[:, -reserved_ref_audio_len:]
            ref_text = ref_text[
                -round(max_ref_duration * len(ref_text) / ref_duration) :
            ]
        logging.info(
            "audio shape:" + str(audio.shape) + "; ref_text shape:" + str(len(ref_text))
        )
    audio = audio.to(device)

    generated_waves = []
    spectrograms = []

    # Fixed (never-rolling) part of the condition: the reference mel, laid out
    # as (batch, frames, channels) for model_obj.sample.
    mel_spec_module = MelSpec(mel_spec_type=mel_spec_type)
    fixed_ref_audio_mel_spec = mel_spec_module(audio)
    fixed_ref_audio_mel_cond = fixed_ref_audio_mel_spec.permute(0, 2, 1)
    fixed_ref_audio_len = fixed_ref_audio_mel_cond.shape[1]

    assert isinstance(ref_text, list)
    fixed_ref_text = ref_text[:]
    # NOTE: assumed non-empty — the duration estimate below divides by this.
    fixed_ref_text_len = len(fixed_ref_text)

    mel_cond = fixed_ref_audio_mel_cond.clone()

    prev_chunk_audio_len = 0  # mel frames of rolling condition from the previous chunk

    for i, gen_text in enumerate(progress.tqdm(gen_text_batches)):
        # Condition text = current (fixed + rolling) ref text + this chunk.
        final_text_list = [ref_text + gen_text]
        logging.info(f"final_text_list: {final_text_list}")

        if fix_duration is not None:
            duration = int(fix_duration * target_sample_rate / hop_length)
        else:
            # Estimate chunk duration from the reference frames-per-token rate.
            assert isinstance(gen_text, list)
            gen_text_len = len(gen_text)
            duration = (
                fixed_ref_audio_len
                + prev_chunk_audio_len
                + int(fixed_ref_audio_len / fixed_ref_text_len * gen_text_len / speed)
            )
        logging.info(f"Duration: {duration}")

        # inference
        with torch.inference_mode():
            logging.info(
                f"generate with nfe_step:{nfe_step}, cfg_strength:{cfg_strength}, "
                f"sway_sampling_coef:{sway_sampling_coef}"
            )
            generated, _ = model_obj.sample(
                cond=mel_cond,
                text=final_text_list,
                duration=duration,
                steps=nfe_step,
                cfg_strength=cfg_strength,
                sway_sampling_coef=sway_sampling_coef,
                seed=seed,
            )
            generated = generated.to(torch.float32)
            logging.info("gen mel shape: " + str(generated.shape))

            # Drop the conditioning frames (fixed reference + rolling prefix).
            stripped_generated = generated[
                :, (fixed_ref_audio_len + prev_chunk_audio_len) :, :
            ]

            # Trim the look-ahead tail from every non-final chunk: its mel
            # length is estimated pro rata from the chunk's text length.
            look_ahead_mel_len = round(
                (duration - fixed_ref_audio_len - prev_chunk_audio_len)
                * chunk_look_ahead
                / len(gen_text)
            )
            if look_ahead_mel_len > 0 and i < len(gen_text_batches) - 1:
                stripped_generated_without_look_ahead = stripped_generated[
                    :,
                    :(-look_ahead_mel_len),
                    :,
                ]
                # Remove the look-ahead tokens from the text as well.
                gen_text = gen_text[:-chunk_look_ahead]
            else:
                stripped_generated_without_look_ahead = stripped_generated
            logging.info(
                "gen mel shape: %s, gen text len: %d"
                % (str(stripped_generated_without_look_ahead.shape), len(gen_text))
            )

            # Length of this chunk without fixed condition and look-ahead.
            prev_chunk_audio_len = stripped_generated_without_look_ahead.shape[1]

            # Back to (batch, channels, frames) for the vocoder.
            generated_mel_spec = stripped_generated_without_look_ahead.permute(0, 2, 1)

            if mel_spec_type == "vocos":
                generated_wave = vocoder.decode(generated_mel_spec)
            elif mel_spec_type == "bigvgan":
                generated_wave = vocoder(generated_mel_spec)
            else:
                # Previously fell through leaving generated_wave undefined
                # (NameError); fail fast with a clear message instead.
                raise ValueError(f"Unsupported mel_spec_type: {mel_spec_type}")

            # Undo the RMS normalization applied to the reference.
            if rms < target_rms:
                generated_wave = generated_wave * rms / target_rms

            logging.info("gen wav shape: " + str(generated_wave.shape))

            # wav -> numpy
            generated_wave = generated_wave.squeeze().cpu().numpy()

            generated_waves.append(generated_wave)
            spectrograms.append(generated_mel_spec[0].cpu().numpy())

            # Keep only the tail of this chunk (and its text) as the rolling
            # condition for the next chunk.
            prev_chunk_cond_audio_len = round(chunk_cond_proportion * prev_chunk_audio_len)
            if prev_chunk_audio_len > prev_chunk_cond_audio_len:
                gen_text_cond = gen_text[-round(chunk_cond_proportion * len(gen_text)):]
                prev_chunk_audio_len = prev_chunk_cond_audio_len
                generated_cond = stripped_generated_without_look_ahead[
                    :, (-prev_chunk_audio_len):, :
                ]
            else:
                generated_cond = stripped_generated_without_look_ahead
                gen_text_cond = gen_text

            # Fixed: len(generated_cond) is the batch size (always 1), not the
            # mel length — log the frame count instead.
            logging.info(
                "gen text cond len: %d, gen mel cond len: %d"
                % (len(gen_text_cond), generated_cond.shape[1])
            )

            ref_text = fixed_ref_text + gen_text_cond
            mel_cond = torch.cat([fixed_ref_audio_mel_cond, generated_cond], dim=1)

    # Combine all generated waves with cross-fading.
    if cross_fade_duration <= 0:
        # Simply concatenate.
        logging.info("simply concatenate")
        final_wave = np.concatenate(generated_waves)
    else:
        final_wave = generated_waves[0]
        for i in range(1, len(generated_waves)):
            prev_wave = final_wave
            next_wave = generated_waves[i]

            # Calculate cross-fade samples, ensuring it does not exceed wave lengths.
            cross_fade_samples = int(cross_fade_duration * target_sample_rate)
            cross_fade_samples = min(cross_fade_samples, len(prev_wave), len(next_wave))

            if cross_fade_samples <= 0:
                # No overlap possible, concatenate.
                final_wave = np.concatenate([prev_wave, next_wave])
                continue

            # Overlapping parts.
            prev_overlap = prev_wave[-cross_fade_samples:]
            next_overlap = next_wave[:cross_fade_samples]

            # Hamming-window fade curves (falling half for fade-out,
            # rising half for fade-in).
            wave_window = np.hamming(2 * cross_fade_samples)
            fade_out = wave_window[cross_fade_samples:]
            fade_in = wave_window[:cross_fade_samples]

            # Cross-faded overlap.
            cross_faded_overlap = prev_overlap * fade_out + next_overlap * fade_in

            # Combine.
            new_wave = np.concatenate(
                [
                    prev_wave[:-cross_fade_samples],
                    cross_faded_overlap,
                    next_wave[cross_fade_samples:],
                ]
            )

            final_wave = new_wave

    # Create a combined spectrogram.
    combined_spectrogram = np.concatenate(spectrograms, axis=1)

    return final_wave, target_sample_rate, combined_spectrogram
|
semantic_tokenizer/__init__.py
ADDED
File without changes
|
semantic_tokenizer/f40ms/README.md
ADDED
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SpeechTokenizerInference
|
2 |
+
|
3 |
+
# 25Hz(40ms) speech semantic tokenizer based on fairseq
|
4 |
+
|
5 |
+
pip install -r requirements_npu.txt
|
6 |
+
|
7 |
+
|
semantic_tokenizer/f40ms/__init__.py
ADDED
File without changes
|
semantic_tokenizer/f40ms/ckpt/model.pt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:c65b209e916297d6b1b2692c04ebc765da894e71beab290da7131b7892d9262d
|
3 |
+
size 3866980554
|
semantic_tokenizer/f40ms/ckpt/model.pt.md5
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
960e4618b84d277d646c22e5f6e9754d model.pt
|
semantic_tokenizer/f40ms/config/dict.km.txt
ADDED
@@ -0,0 +1,1000 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
0 1
|
2 |
+
1 1
|
3 |
+
2 1
|
4 |
+
3 1
|
5 |
+
4 1
|
6 |
+
5 1
|
7 |
+
6 1
|
8 |
+
7 1
|
9 |
+
8 1
|
10 |
+
9 1
|
11 |
+
10 1
|
12 |
+
11 1
|
13 |
+
12 1
|
14 |
+
13 1
|
15 |
+
14 1
|
16 |
+
15 1
|
17 |
+
16 1
|
18 |
+
17 1
|
19 |
+
18 1
|
20 |
+
19 1
|
21 |
+
20 1
|
22 |
+
21 1
|
23 |
+
22 1
|
24 |
+
23 1
|
25 |
+
24 1
|
26 |
+
25 1
|
27 |
+
26 1
|
28 |
+
27 1
|
29 |
+
28 1
|
30 |
+
29 1
|
31 |
+
30 1
|
32 |
+
31 1
|
33 |
+
32 1
|
34 |
+
33 1
|
35 |
+
34 1
|
36 |
+
35 1
|
37 |
+
36 1
|
38 |
+
37 1
|
39 |
+
38 1
|
40 |
+
39 1
|
41 |
+
40 1
|
42 |
+
41 1
|
43 |
+
42 1
|
44 |
+
43 1
|
45 |
+
44 1
|
46 |
+
45 1
|
47 |
+
46 1
|
48 |
+
47 1
|
49 |
+
48 1
|
50 |
+
49 1
|
51 |
+
50 1
|
52 |
+
51 1
|
53 |
+
52 1
|
54 |
+
53 1
|
55 |
+
54 1
|
56 |
+
55 1
|
57 |
+
56 1
|
58 |
+
57 1
|
59 |
+
58 1
|
60 |
+
59 1
|
61 |
+
60 1
|
62 |
+
61 1
|
63 |
+
62 1
|
64 |
+
63 1
|
65 |
+
64 1
|
66 |
+
65 1
|
67 |
+
66 1
|
68 |
+
67 1
|
69 |
+
68 1
|
70 |
+
69 1
|
71 |
+
70 1
|
72 |
+
71 1
|
73 |
+
72 1
|
74 |
+
73 1
|
75 |
+
74 1
|
76 |
+
75 1
|
77 |
+
76 1
|
78 |
+
77 1
|
79 |
+
78 1
|
80 |
+
79 1
|
81 |
+
80 1
|
82 |
+
81 1
|
83 |
+
82 1
|
84 |
+
83 1
|
85 |
+
84 1
|
86 |
+
85 1
|
87 |
+
86 1
|
88 |
+
87 1
|
89 |
+
88 1
|
90 |
+
89 1
|
91 |
+
90 1
|
92 |
+
91 1
|
93 |
+
92 1
|
94 |
+
93 1
|
95 |
+
94 1
|
96 |
+
95 1
|
97 |
+
96 1
|
98 |
+
97 1
|
99 |
+
98 1
|
100 |
+
99 1
|
101 |
+
100 1
|
102 |
+
101 1
|
103 |
+
102 1
|
104 |
+
103 1
|
105 |
+
104 1
|
106 |
+
105 1
|
107 |
+
106 1
|
108 |
+
107 1
|
109 |
+
108 1
|
110 |
+
109 1
|
111 |
+
110 1
|
112 |
+
111 1
|
113 |
+
112 1
|
114 |
+
113 1
|
115 |
+
114 1
|
116 |
+
115 1
|
117 |
+
116 1
|
118 |
+
117 1
|
119 |
+
118 1
|
120 |
+
119 1
|
121 |
+
120 1
|
122 |
+
121 1
|
123 |
+
122 1
|
124 |
+
123 1
|
125 |
+
124 1
|
126 |
+
125 1
|
127 |
+
126 1
|
128 |
+
127 1
|
129 |
+
128 1
|
130 |
+
129 1
|
131 |
+
130 1
|
132 |
+
131 1
|
133 |
+
132 1
|
134 |
+
133 1
|
135 |
+
134 1
|
136 |
+
135 1
|
137 |
+
136 1
|
138 |
+
137 1
|
139 |
+
138 1
|
140 |
+
139 1
|
141 |
+
140 1
|
142 |
+
141 1
|
143 |
+
142 1
|
144 |
+
143 1
|
145 |
+
144 1
|
146 |
+
145 1
|
147 |
+
146 1
|
148 |
+
147 1
|
149 |
+
148 1
|
150 |
+
149 1
|
151 |
+
150 1
|
152 |
+
151 1
|
153 |
+
152 1
|
154 |
+
153 1
|
155 |
+
154 1
|
156 |
+
155 1
|
157 |
+
156 1
|
158 |
+
157 1
|
159 |
+
158 1
|
160 |
+
159 1
|
161 |
+
160 1
|
162 |
+
161 1
|
163 |
+
162 1
|
164 |
+
163 1
|
165 |
+
164 1
|
166 |
+
165 1
|
167 |
+
166 1
|
168 |
+
167 1
|
169 |
+
168 1
|
170 |
+
169 1
|
171 |
+
170 1
|
172 |
+
171 1
|
173 |
+
172 1
|
174 |
+
173 1
|
175 |
+
174 1
|
176 |
+
175 1
|
177 |
+
176 1
|
178 |
+
177 1
|
179 |
+
178 1
|
180 |
+
179 1
|
181 |
+
180 1
|
182 |
+
181 1
|
183 |
+
182 1
|
184 |
+
183 1
|
185 |
+
184 1
|
186 |
+
185 1
|
187 |
+
186 1
|
188 |
+
187 1
|
189 |
+
188 1
|
190 |
+
189 1
|
191 |
+
190 1
|
192 |
+
191 1
|
193 |
+
192 1
|
194 |
+
193 1
|
195 |
+
194 1
|
196 |
+
195 1
|
197 |
+
196 1
|
198 |
+
197 1
|
199 |
+
198 1
|
200 |
+
199 1
|
201 |
+
200 1
|
202 |
+
201 1
|
203 |
+
202 1
|
204 |
+
203 1
|
205 |
+
204 1
|
206 |
+
205 1
|
207 |
+
206 1
|
208 |
+
207 1
|
209 |
+
208 1
|
210 |
+
209 1
|
211 |
+
210 1
|
212 |
+
211 1
|
213 |
+
212 1
|
214 |
+
213 1
|
215 |
+
214 1
|
216 |
+
215 1
|
217 |
+
216 1
|
218 |
+
217 1
|
219 |
+
218 1
|
220 |
+
219 1
|
221 |
+
220 1
|
222 |
+
221 1
|
223 |
+
222 1
|
224 |
+
223 1
|
225 |
+
224 1
|
226 |
+
225 1
|
227 |
+
226 1
|
228 |
+
227 1
|
229 |
+
228 1
|
230 |
+
229 1
|
231 |
+
230 1
|
232 |
+
231 1
|
233 |
+
232 1
|
234 |
+
233 1
|
235 |
+
234 1
|
236 |
+
235 1
|
237 |
+
236 1
|
238 |
+
237 1
|
239 |
+
238 1
|
240 |
+
239 1
|
241 |
+
240 1
|
242 |
+
241 1
|
243 |
+
242 1
|
244 |
+
243 1
|
245 |
+
244 1
|
246 |
+
245 1
|
247 |
+
246 1
|
248 |
+
247 1
|
249 |
+
248 1
|
250 |
+
249 1
|
251 |
+
250 1
|
252 |
+
251 1
|
253 |
+
252 1
|
254 |
+
253 1
|
255 |
+
254 1
|
256 |
+
255 1
|
257 |
+
256 1
|
258 |
+
257 1
|
259 |
+
258 1
|
260 |
+
259 1
|
261 |
+
260 1
|
262 |
+
261 1
|
263 |
+
262 1
|
264 |
+
263 1
|
265 |
+
264 1
|
266 |
+
265 1
|
267 |
+
266 1
|
268 |
+
267 1
|
269 |
+
268 1
|
270 |
+
269 1
|
271 |
+
270 1
|
272 |
+
271 1
|
273 |
+
272 1
|
274 |
+
273 1
|
275 |
+
274 1
|
276 |
+
275 1
|
277 |
+
276 1
|
278 |
+
277 1
|
279 |
+
278 1
|
280 |
+
279 1
|
281 |
+
280 1
|
282 |
+
281 1
|
283 |
+
282 1
|
284 |
+
283 1
|
285 |
+
284 1
|
286 |
+
285 1
|
287 |
+
286 1
|
288 |
+
287 1
|
289 |
+
288 1
|
290 |
+
289 1
|
291 |
+
290 1
|
292 |
+
291 1
|
293 |
+
292 1
|
294 |
+
293 1
|
295 |
+
294 1
|
296 |
+
295 1
|
297 |
+
296 1
|
298 |
+
297 1
|
299 |
+
298 1
|
300 |
+
299 1
|
301 |
+
300 1
|
302 |
+
301 1
|
303 |
+
302 1
|
304 |
+
303 1
|
305 |
+
304 1
|
306 |
+
305 1
|
307 |
+
306 1
|
308 |
+
307 1
|
309 |
+
308 1
|
310 |
+
309 1
|
311 |
+
310 1
|
312 |
+
311 1
|
313 |
+
312 1
|
314 |
+
313 1
|
315 |
+
314 1
|
316 |
+
315 1
|
317 |
+
316 1
|
318 |
+
317 1
|
319 |
+
318 1
|
320 |
+
319 1
|
321 |
+
320 1
|
322 |
+
321 1
|
323 |
+
322 1
|
324 |
+
323 1
|
325 |
+
324 1
|
326 |
+
325 1
|
327 |
+
326 1
|
328 |
+
327 1
|
329 |
+
328 1
|
330 |
+
329 1
|
331 |
+
330 1
|
332 |
+
331 1
|
333 |
+
332 1
|
334 |
+
333 1
|
335 |
+
334 1
|
336 |
+
335 1
|
337 |
+
336 1
|
338 |
+
337 1
|
339 |
+
338 1
|
340 |
+
339 1
|
341 |
+
340 1
|
342 |
+
341 1
|
343 |
+
342 1
|
344 |
+
343 1
|
345 |
+
344 1
|
346 |
+
345 1
|
347 |
+
346 1
|
348 |
+
347 1
|
349 |
+
348 1
|
350 |
+
349 1
|
351 |
+
350 1
|
352 |
+
351 1
|
353 |
+
352 1
|
354 |
+
353 1
|
355 |
+
354 1
|
356 |
+
355 1
|
357 |
+
356 1
|
358 |
+
357 1
|
359 |
+
358 1
|
360 |
+
359 1
|
361 |
+
360 1
|
362 |
+
361 1
|
363 |
+
362 1
|
364 |
+
363 1
|
365 |
+
364 1
|
366 |
+
365 1
|
367 |
+
366 1
|
368 |
+
367 1
|
369 |
+
368 1
|
370 |
+
369 1
|
371 |
+
370 1
|
372 |
+
371 1
|
373 |
+
372 1
|
374 |
+
373 1
|
375 |
+
374 1
|
376 |
+
375 1
|
377 |
+
376 1
|
378 |
+
377 1
|
379 |
+
378 1
|
380 |
+
379 1
|
381 |
+
380 1
|
382 |
+
381 1
|
383 |
+
382 1
|
384 |
+
383 1
|
385 |
+
384 1
|
386 |
+
385 1
|
387 |
+
386 1
|
388 |
+
387 1
|
389 |
+
388 1
|
390 |
+
389 1
|
391 |
+
390 1
|
392 |
+
391 1
|
393 |
+
392 1
|
394 |
+
393 1
|
395 |
+
394 1
|
396 |
+
395 1
|
397 |
+
396 1
|
398 |
+
397 1
|
399 |
+
398 1
|
400 |
+
399 1
|
401 |
+
400 1
|
402 |
+
401 1
|
403 |
+
402 1
|
404 |
+
403 1
|
405 |
+
404 1
|
406 |
+
405 1
|
407 |
+
406 1
|
408 |
+
407 1
|
409 |
+
408 1
|
410 |
+
409 1
|
411 |
+
410 1
|
412 |
+
411 1
|
413 |
+
412 1
|
414 |
+
413 1
|
415 |
+
414 1
|
416 |
+
415 1
|
417 |
+
416 1
|
418 |
+
417 1
|
419 |
+
418 1
|
420 |
+
419 1
|
421 |
+
420 1
|
422 |
+
421 1
|
423 |
+
422 1
|
424 |
+
423 1
|
425 |
+
424 1
|
426 |
+
425 1
|
427 |
+
426 1
|
428 |
+
427 1
|
429 |
+
428 1
|
430 |
+
429 1
|
431 |
+
430 1
|
432 |
+
431 1
|
433 |
+
432 1
|
434 |
+
433 1
|
435 |
+
434 1
|
436 |
+
435 1
|
437 |
+
436 1
|
438 |
+
437 1
|
439 |
+
438 1
|
440 |
+
439 1
|
441 |
+
440 1
|
442 |
+
441 1
|
443 |
+
442 1
|
444 |
+
443 1
|
445 |
+
444 1
|
446 |
+
445 1
|
447 |
+
446 1
|
448 |
+
447 1
|
449 |
+
448 1
|
450 |
+
449 1
|
451 |
+
450 1
|
452 |
+
451 1
|
453 |
+
452 1
|
454 |
+
453 1
|
455 |
+
454 1
|
456 |
+
455 1
|
457 |
+
456 1
|
458 |
+
457 1
|
459 |
+
458 1
|
460 |
+
459 1
|
461 |
+
460 1
|
462 |
+
461 1
|
463 |
+
462 1
|
464 |
+
463 1
|
465 |
+
464 1
|
466 |
+
465 1
|
467 |
+
466 1
|
468 |
+
467 1
|
469 |
+
468 1
|
470 |
+
469 1
|
471 |
+
470 1
|
472 |
+
471 1
|
473 |
+
472 1
|
474 |
+
473 1
|
475 |
+
474 1
|
476 |
+
475 1
|
477 |
+
476 1
|
478 |
+
477 1
|
479 |
+
478 1
|
480 |
+
479 1
|
481 |
+
480 1
|
482 |
+
481 1
|
483 |
+
482 1
|
484 |
+
483 1
|
485 |
+
484 1
|
486 |
+
485 1
|
487 |
+
486 1
|
488 |
+
487 1
|
489 |
+
488 1
|
490 |
+
489 1
|
491 |
+
490 1
|
492 |
+
491 1
|
493 |
+
492 1
|
494 |
+
493 1
|
495 |
+
494 1
|
496 |
+
495 1
|
497 |
+
496 1
|
498 |
+
497 1
|
499 |
+
498 1
|
500 |
+
499 1
|
501 |
+
500 1
|
502 |
+
501 1
|
503 |
+
502 1
|
504 |
+
503 1
|
505 |
+
504 1
|
506 |
+
505 1
|
507 |
+
506 1
|
508 |
+
507 1
|
509 |
+
508 1
|
510 |
+
509 1
|
511 |
+
510 1
|
512 |
+
511 1
|
513 |
+
512 1
|
514 |
+
513 1
|
515 |
+
514 1
|
516 |
+
515 1
|
517 |
+
516 1
|
518 |
+
517 1
|
519 |
+
518 1
|
520 |
+
519 1
|
521 |
+
520 1
|
522 |
+
521 1
|
523 |
+
522 1
|
524 |
+
523 1
|
525 |
+
524 1
|
526 |
+
525 1
|
527 |
+
526 1
|
528 |
+
527 1
|
529 |
+
528 1
|
530 |
+
529 1
|
531 |
+
530 1
|
532 |
+
531 1
|
533 |
+
532 1
|
534 |
+
533 1
|
535 |
+
534 1
|
536 |
+
535 1
|
537 |
+
536 1
|
538 |
+
537 1
|
539 |
+
538 1
|
540 |
+
539 1
|
541 |
+
540 1
|
542 |
+
541 1
|
543 |
+
542 1
|
544 |
+
543 1
|
545 |
+
544 1
|
546 |
+
545 1
|
547 |
+
546 1
|
548 |
+
547 1
|
549 |
+
548 1
|
550 |
+
549 1
|
551 |
+
550 1
|
552 |
+
551 1
|
553 |
+
552 1
|
554 |
+
553 1
|
555 |
+
554 1
|
556 |
+
555 1
|
557 |
+
556 1
|
558 |
+
557 1
|
559 |
+
558 1
|
560 |
+
559 1
|
561 |
+
560 1
|
562 |
+
561 1
|
563 |
+
562 1
|
564 |
+
563 1
|
565 |
+
564 1
|
566 |
+
565 1
|
567 |
+
566 1
|
568 |
+
567 1
|
569 |
+
568 1
|
570 |
+
569 1
|
571 |
+
570 1
|
572 |
+
571 1
|
573 |
+
572 1
|
574 |
+
573 1
|
575 |
+
574 1
|
576 |
+
575 1
|
577 |
+
576 1
|
578 |
+
577 1
|
579 |
+
578 1
|
580 |
+
579 1
|
581 |
+
580 1
|
582 |
+
581 1
|
583 |
+
582 1
|
584 |
+
583 1
|
585 |
+
584 1
|
586 |
+
585 1
|
587 |
+
586 1
|
588 |
+
587 1
|
589 |
+
588 1
|
590 |
+
589 1
|
591 |
+
590 1
|
592 |
+
591 1
|
593 |
+
592 1
|
594 |
+
593 1
|
595 |
+
594 1
|
596 |
+
595 1
|
597 |
+
596 1
|
598 |
+
597 1
|
599 |
+
598 1
|
600 |
+
599 1
|
601 |
+
600 1
|
602 |
+
601 1
|
603 |
+
602 1
|
604 |
+
603 1
|
605 |
+
604 1
|
606 |
+
605 1
|
607 |
+
606 1
|
608 |
+
607 1
|
609 |
+
608 1
|
610 |
+
609 1
|
611 |
+
610 1
|
612 |
+
611 1
|
613 |
+
612 1
|
614 |
+
613 1
|
615 |
+
614 1
|
616 |
+
615 1
|
617 |
+
616 1
|
618 |
+
617 1
|
619 |
+
618 1
|
620 |
+
619 1
|
621 |
+
620 1
|
622 |
+
621 1
|
623 |
+
622 1
|
624 |
+
623 1
|
625 |
+
624 1
|
626 |
+
625 1
|
627 |
+
626 1
|
628 |
+
627 1
|
629 |
+
628 1
|
630 |
+
629 1
|
631 |
+
630 1
|
632 |
+
631 1
|
633 |
+
632 1
|
634 |
+
633 1
|
635 |
+
634 1
|
636 |
+
635 1
|
637 |
+
636 1
|
638 |
+
637 1
|
639 |
+
638 1
|
640 |
+
639 1
|
641 |
+
640 1
|
642 |
+
641 1
|
643 |
+
642 1
|
644 |
+
643 1
|
645 |
+
644 1
|
646 |
+
645 1
|
647 |
+
646 1
|
648 |
+
647 1
|
649 |
+
648 1
|
650 |
+
649 1
|
651 |
+
650 1
|
652 |
+
651 1
|
653 |
+
652 1
|
654 |
+
653 1
|
655 |
+
654 1
|
656 |
+
655 1
|
657 |
+
656 1
|
658 |
+
657 1
|
659 |
+
658 1
|
660 |
+
659 1
|
661 |
+
660 1
|
662 |
+
661 1
|
663 |
+
662 1
|
664 |
+
663 1
|
665 |
+
664 1
|
666 |
+
665 1
|
667 |
+
666 1
|
668 |
+
667 1
|
669 |
+
668 1
|
670 |
+
669 1
|
671 |
+
670 1
|
672 |
+
671 1
|
673 |
+
672 1
|
674 |
+
673 1
|
675 |
+
674 1
|
676 |
+
675 1
|
677 |
+
676 1
|
678 |
+
677 1
|
679 |
+
678 1
|
680 |
+
679 1
|
681 |
+
680 1
|
682 |
+
681 1
|
683 |
+
682 1
|
684 |
+
683 1
|
685 |
+
684 1
|
686 |
+
685 1
|
687 |
+
686 1
|
688 |
+
687 1
|
689 |
+
688 1
|
690 |
+
689 1
|
691 |
+
690 1
|
692 |
+
691 1
|
693 |
+
692 1
|
694 |
+
693 1
|
695 |
+
694 1
|
696 |
+
695 1
|
697 |
+
696 1
|
698 |
+
697 1
|
699 |
+
698 1
|
700 |
+
699 1
|
701 |
+
700 1
|
702 |
+
701 1
|
703 |
+
702 1
|
704 |
+
703 1
|
705 |
+
704 1
|
706 |
+
705 1
|
707 |
+
706 1
|
708 |
+
707 1
|
709 |
+
708 1
|
710 |
+
709 1
|
711 |
+
710 1
|
712 |
+
711 1
|
713 |
+
712 1
|
714 |
+
713 1
|
715 |
+
714 1
|
716 |
+
715 1
|
717 |
+
716 1
|
718 |
+
717 1
|
719 |
+
718 1
|
720 |
+
719 1
|
721 |
+
720 1
|
722 |
+
721 1
|
723 |
+
722 1
|
724 |
+
723 1
|
725 |
+
724 1
|
726 |
+
725 1
|
727 |
+
726 1
|
728 |
+
727 1
|
729 |
+
728 1
|
730 |
+
729 1
|
731 |
+
730 1
|
732 |
+
731 1
|
733 |
+
732 1
|
734 |
+
733 1
|
735 |
+
734 1
|
736 |
+
735 1
|
737 |
+
736 1
|
738 |
+
737 1
|
739 |
+
738 1
|
740 |
+
739 1
|
741 |
+
740 1
|
742 |
+
741 1
|
743 |
+
742 1
|
744 |
+
743 1
|
745 |
+
744 1
|
746 |
+
745 1
|
747 |
+
746 1
|
748 |
+
747 1
|
749 |
+
748 1
|
750 |
+
749 1
|
751 |
+
750 1
|
752 |
+
751 1
|
753 |
+
752 1
|
754 |
+
753 1
|
755 |
+
754 1
|
756 |
+
755 1
|
757 |
+
756 1
|
758 |
+
757 1
|
759 |
+
758 1
|
760 |
+
759 1
|
761 |
+
760 1
|
762 |
+
761 1
|
763 |
+
762 1
|
764 |
+
763 1
|
765 |
+
764 1
|
766 |
+
765 1
|
767 |
+
766 1
|
768 |
+
767 1
|
769 |
+
768 1
|
770 |
+
769 1
|
771 |
+
770 1
|
772 |
+
771 1
|
773 |
+
772 1
|
774 |
+
773 1
|
775 |
+
774 1
|
776 |
+
775 1
|
777 |
+
776 1
|
778 |
+
777 1
|
779 |
+
778 1
|
780 |
+
779 1
|
781 |
+
780 1
|
782 |
+
781 1
|
783 |
+
782 1
|
784 |
+
783 1
|
785 |
+
784 1
|
786 |
+
785 1
|
787 |
+
786 1
|
788 |
+
787 1
|
789 |
+
788 1
|
790 |
+
789 1
|
791 |
+
790 1
|
792 |
+
791 1
|
793 |
+
792 1
|
794 |
+
793 1
|
795 |
+
794 1
|
796 |
+
795 1
|
797 |
+
796 1
|
798 |
+
797 1
|
799 |
+
798 1
|
800 |
+
799 1
|
801 |
+
800 1
|
802 |
+
801 1
|
803 |
+
802 1
|
804 |
+
803 1
|
805 |
+
804 1
|
806 |
+
805 1
|
807 |
+
806 1
|
808 |
+
807 1
|
809 |
+
808 1
|
810 |
+
809 1
|
811 |
+
810 1
|
812 |
+
811 1
|
813 |
+
812 1
|
814 |
+
813 1
|
815 |
+
814 1
|
816 |
+
815 1
|
817 |
+
816 1
|
818 |
+
817 1
|
819 |
+
818 1
|
820 |
+
819 1
|
821 |
+
820 1
|
822 |
+
821 1
|
823 |
+
822 1
|
824 |
+
823 1
|
825 |
+
824 1
|
826 |
+
825 1
|
827 |
+
826 1
|
828 |
+
827 1
|
829 |
+
828 1
|
830 |
+
829 1
|
831 |
+
830 1
|
832 |
+
831 1
|
833 |
+
832 1
|
834 |
+
833 1
|
835 |
+
834 1
|
836 |
+
835 1
|
837 |
+
836 1
|
838 |
+
837 1
|
839 |
+
838 1
|
840 |
+
839 1
|
841 |
+
840 1
|
842 |
+
841 1
|
843 |
+
842 1
|
844 |
+
843 1
|
845 |
+
844 1
|
846 |
+
845 1
|
847 |
+
846 1
|
848 |
+
847 1
|
849 |
+
848 1
|
850 |
+
849 1
|
851 |
+
850 1
|
852 |
+
851 1
|
853 |
+
852 1
|
854 |
+
853 1
|
855 |
+
854 1
|
856 |
+
855 1
|
857 |
+
856 1
|
858 |
+
857 1
|
859 |
+
858 1
|
860 |
+
859 1
|
861 |
+
860 1
|
862 |
+
861 1
|
863 |
+
862 1
|
864 |
+
863 1
|
865 |
+
864 1
|
866 |
+
865 1
|
867 |
+
866 1
|
868 |
+
867 1
|
869 |
+
868 1
|
870 |
+
869 1
|
871 |
+
870 1
|
872 |
+
871 1
|
873 |
+
872 1
|
874 |
+
873 1
|
875 |
+
874 1
|
876 |
+
875 1
|
877 |
+
876 1
|
878 |
+
877 1
|
879 |
+
878 1
|
880 |
+
879 1
|
881 |
+
880 1
|
882 |
+
881 1
|
883 |
+
882 1
|
884 |
+
883 1
|
885 |
+
884 1
|
886 |
+
885 1
|
887 |
+
886 1
|
888 |
+
887 1
|
889 |
+
888 1
|
890 |
+
889 1
|
891 |
+
890 1
|
892 |
+
891 1
|
893 |
+
892 1
|
894 |
+
893 1
|
895 |
+
894 1
|
896 |
+
895 1
|
897 |
+
896 1
|
898 |
+
897 1
|
899 |
+
898 1
|
900 |
+
899 1
|
901 |
+
900 1
|
902 |
+
901 1
|
903 |
+
902 1
|
904 |
+
903 1
|
905 |
+
904 1
|
906 |
+
905 1
|
907 |
+
906 1
|
908 |
+
907 1
|
909 |
+
908 1
|
910 |
+
909 1
|
911 |
+
910 1
|
912 |
+
911 1
|
913 |
+
912 1
|
914 |
+
913 1
|
915 |
+
914 1
|
916 |
+
915 1
|
917 |
+
916 1
|
918 |
+
917 1
|
919 |
+
918 1
|
920 |
+
919 1
|
921 |
+
920 1
|
922 |
+
921 1
|
923 |
+
922 1
|
924 |
+
923 1
|
925 |
+
924 1
|
926 |
+
925 1
|
927 |
+
926 1
|
928 |
+
927 1
|
929 |
+
928 1
|
930 |
+
929 1
|
931 |
+
930 1
|
932 |
+
931 1
|
933 |
+
932 1
|
934 |
+
933 1
|
935 |
+
934 1
|
936 |
+
935 1
|
937 |
+
936 1
|
938 |
+
937 1
|
939 |
+
938 1
|
940 |
+
939 1
|
941 |
+
940 1
|
942 |
+
941 1
|
943 |
+
942 1
|
944 |
+
943 1
|
945 |
+
944 1
|
946 |
+
945 1
|
947 |
+
946 1
|
948 |
+
947 1
|
949 |
+
948 1
|
950 |
+
949 1
|
951 |
+
950 1
|
952 |
+
951 1
|
953 |
+
952 1
|
954 |
+
953 1
|
955 |
+
954 1
|
956 |
+
955 1
|
957 |
+
956 1
|
958 |
+
957 1
|
959 |
+
958 1
|
960 |
+
959 1
|
961 |
+
960 1
|
962 |
+
961 1
|
963 |
+
962 1
|
964 |
+
963 1
|
965 |
+
964 1
|
966 |
+
965 1
|
967 |
+
966 1
|
968 |
+
967 1
|
969 |
+
968 1
|
970 |
+
969 1
|
971 |
+
970 1
|
972 |
+
971 1
|
973 |
+
972 1
|
974 |
+
973 1
|
975 |
+
974 1
|
976 |
+
975 1
|
977 |
+
976 1
|
978 |
+
977 1
|
979 |
+
978 1
|
980 |
+
979 1
|
981 |
+
980 1
|
982 |
+
981 1
|
983 |
+
982 1
|
984 |
+
983 1
|
985 |
+
984 1
|
986 |
+
985 1
|
987 |
+
986 1
|
988 |
+
987 1
|
989 |
+
988 1
|
990 |
+
989 1
|
991 |
+
990 1
|
992 |
+
991 1
|
993 |
+
992 1
|
994 |
+
993 1
|
995 |
+
994 1
|
996 |
+
995 1
|
997 |
+
996 1
|
998 |
+
997 1
|
999 |
+
998 1
|
1000 |
+
999 1
|
semantic_tokenizer/f40ms/config/hubert_config.yaml
ADDED
@@ -0,0 +1,344 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
_name: null
|
2 |
+
common:
|
3 |
+
_name: null
|
4 |
+
no_progress_bar: false
|
5 |
+
log_interval: 100
|
6 |
+
log_format: json
|
7 |
+
log_file: null
|
8 |
+
aim_repo: null
|
9 |
+
aim_run_hash: null
|
10 |
+
tensorboard_logdir: tblog
|
11 |
+
wandb_project: null
|
12 |
+
azureml_logging: false
|
13 |
+
seed: 1337
|
14 |
+
cpu: false
|
15 |
+
tpu: false
|
16 |
+
bf16: false
|
17 |
+
memory_efficient_bf16: false
|
18 |
+
fp16: true
|
19 |
+
memory_efficient_fp16: true
|
20 |
+
fp16_no_flatten_grads: false
|
21 |
+
fp16_init_scale: 128
|
22 |
+
fp16_scale_window: null
|
23 |
+
fp16_scale_tolerance: 0.0
|
24 |
+
on_cpu_convert_precision: false
|
25 |
+
min_loss_scale: 0.0001
|
26 |
+
threshold_loss_scale: null
|
27 |
+
amp: false
|
28 |
+
amp_batch_retries: 2
|
29 |
+
amp_init_scale: 128
|
30 |
+
amp_scale_window: null
|
31 |
+
user_dir: null
|
32 |
+
empty_cache_freq: 0
|
33 |
+
all_gather_list_size: 16384
|
34 |
+
model_parallel_size: 1
|
35 |
+
quantization_config_path: null
|
36 |
+
profile: false
|
37 |
+
reset_logging: false
|
38 |
+
suppress_crashes: false
|
39 |
+
use_plasma_view: false
|
40 |
+
plasma_path: /tmp/plasma
|
41 |
+
common_eval:
|
42 |
+
_name: null
|
43 |
+
path: null
|
44 |
+
post_process: null
|
45 |
+
quiet: false
|
46 |
+
model_overrides: '{}'
|
47 |
+
results_path: null
|
48 |
+
distributed_training:
|
49 |
+
_name: null
|
50 |
+
distributed_world_size: 64
|
51 |
+
distributed_num_procs: 8
|
52 |
+
distributed_rank: 0
|
53 |
+
distributed_backend: nccl
|
54 |
+
distributed_init_method: tcp://modelarts-job-4e3029c5-de6d-4973-85ce-9be855ccbfcf:6000
|
55 |
+
distributed_port: -1
|
56 |
+
device_id: 0
|
57 |
+
distributed_no_spawn: false
|
58 |
+
ddp_backend: no_c10d
|
59 |
+
ddp_comm_hook: none
|
60 |
+
bucket_cap_mb: 25
|
61 |
+
fix_batches_to_gpus: false
|
62 |
+
find_unused_parameters: true
|
63 |
+
gradient_as_bucket_view: false
|
64 |
+
fast_stat_sync: false
|
65 |
+
heartbeat_timeout: -1
|
66 |
+
broadcast_buffers: false
|
67 |
+
slowmo_momentum: null
|
68 |
+
slowmo_base_algorithm: localsgd
|
69 |
+
localsgd_frequency: 3
|
70 |
+
nprocs_per_node: 8
|
71 |
+
pipeline_model_parallel: false
|
72 |
+
pipeline_balance: null
|
73 |
+
pipeline_devices: null
|
74 |
+
pipeline_chunks: 0
|
75 |
+
pipeline_encoder_balance: null
|
76 |
+
pipeline_encoder_devices: null
|
77 |
+
pipeline_decoder_balance: null
|
78 |
+
pipeline_decoder_devices: null
|
79 |
+
pipeline_checkpoint: never
|
80 |
+
zero_sharding: none
|
81 |
+
fp16: true
|
82 |
+
memory_efficient_fp16: true
|
83 |
+
tpu: false
|
84 |
+
no_reshard_after_forward: false
|
85 |
+
fp32_reduce_scatter: false
|
86 |
+
cpu_offload: false
|
87 |
+
use_sharded_state: false
|
88 |
+
not_fsdp_flatten_parameters: false
|
89 |
+
dataset:
|
90 |
+
_name: null
|
91 |
+
num_workers: 1
|
92 |
+
skip_invalid_size_inputs_valid_test: true
|
93 |
+
max_tokens: 450000
|
94 |
+
batch_size: null
|
95 |
+
required_batch_size_multiple: 8
|
96 |
+
required_seq_len_multiple: 1
|
97 |
+
dataset_impl: null
|
98 |
+
data_buffer_size: 10
|
99 |
+
train_subset: train
|
100 |
+
valid_subset: valid
|
101 |
+
combine_valid_subsets: null
|
102 |
+
ignore_unused_valid_subsets: false
|
103 |
+
validate_interval: 1
|
104 |
+
validate_interval_updates: 1000000
|
105 |
+
validate_after_updates: 0
|
106 |
+
fixed_validation_seed: null
|
107 |
+
disable_validation: false
|
108 |
+
max_tokens_valid: 450000
|
109 |
+
batch_size_valid: null
|
110 |
+
max_valid_steps: null
|
111 |
+
curriculum: 0
|
112 |
+
gen_subset: test
|
113 |
+
num_shards: 1
|
114 |
+
shard_id: 0
|
115 |
+
grouped_shuffling: false
|
116 |
+
update_epoch_batch_itr: false
|
117 |
+
update_ordered_indices_seed: false
|
118 |
+
optimization:
|
119 |
+
_name: null
|
120 |
+
max_epoch: 0
|
121 |
+
max_update: 700000
|
122 |
+
stop_time_hours: 0.0
|
123 |
+
clip_norm: 1.0
|
124 |
+
sentence_avg: false
|
125 |
+
update_freq:
|
126 |
+
- 4
|
127 |
+
lr:
|
128 |
+
- 0.0015
|
129 |
+
stop_min_lr: -1.0
|
130 |
+
use_bmuf: false
|
131 |
+
skip_remainder_batch: false
|
132 |
+
checkpoint:
|
133 |
+
_name: null
|
134 |
+
save_dir: checkpoints
|
135 |
+
restore_file: checkpoint_last.pt
|
136 |
+
continue_once: null
|
137 |
+
finetune_from_model: null
|
138 |
+
reset_dataloader: false
|
139 |
+
reset_lr_scheduler: false
|
140 |
+
reset_meters: false
|
141 |
+
reset_optimizer: false
|
142 |
+
optimizer_overrides: '{}'
|
143 |
+
save_interval: 1
|
144 |
+
save_interval_updates: 1000000
|
145 |
+
keep_interval_updates: 100
|
146 |
+
keep_interval_updates_pattern: -1
|
147 |
+
keep_last_epochs: -1
|
148 |
+
keep_best_checkpoints: -1
|
149 |
+
no_save: false
|
150 |
+
no_epoch_checkpoints: false
|
151 |
+
no_last_checkpoints: false
|
152 |
+
no_save_optimizer_state: false
|
153 |
+
best_checkpoint_metric: loss
|
154 |
+
maximize_best_checkpoint_metric: false
|
155 |
+
patience: -1
|
156 |
+
checkpoint_suffix: ''
|
157 |
+
checkpoint_shard_count: 1
|
158 |
+
load_checkpoint_on_all_dp_ranks: false
|
159 |
+
write_checkpoints_asynchronously: false
|
160 |
+
model_parallel_size: 1
|
161 |
+
bmuf:
|
162 |
+
_name: null
|
163 |
+
block_lr: 1.0
|
164 |
+
block_momentum: 0.875
|
165 |
+
global_sync_iter: 50
|
166 |
+
warmup_iterations: 500
|
167 |
+
use_nbm: false
|
168 |
+
average_sync: false
|
169 |
+
distributed_world_size: 64
|
170 |
+
generation:
|
171 |
+
_name: null
|
172 |
+
beam: 5
|
173 |
+
nbest: 1
|
174 |
+
max_len_a: 0.0
|
175 |
+
max_len_b: 200
|
176 |
+
min_len: 1
|
177 |
+
match_source_len: false
|
178 |
+
unnormalized: false
|
179 |
+
no_early_stop: false
|
180 |
+
no_beamable_mm: false
|
181 |
+
lenpen: 1.0
|
182 |
+
unkpen: 0.0
|
183 |
+
replace_unk: null
|
184 |
+
sacrebleu: false
|
185 |
+
score_reference: false
|
186 |
+
prefix_size: 0
|
187 |
+
no_repeat_ngram_size: 0
|
188 |
+
sampling: false
|
189 |
+
sampling_topk: -1
|
190 |
+
sampling_topp: -1.0
|
191 |
+
constraints: null
|
192 |
+
temperature: 1.0
|
193 |
+
diverse_beam_groups: -1
|
194 |
+
diverse_beam_strength: 0.5
|
195 |
+
diversity_rate: -1.0
|
196 |
+
print_alignment: null
|
197 |
+
print_step: false
|
198 |
+
lm_path: null
|
199 |
+
lm_weight: 0.0
|
200 |
+
iter_decode_eos_penalty: 0.0
|
201 |
+
iter_decode_max_iter: 10
|
202 |
+
iter_decode_force_max_iter: false
|
203 |
+
iter_decode_with_beam: 1
|
204 |
+
iter_decode_with_external_reranker: false
|
205 |
+
retain_iter_history: false
|
206 |
+
retain_dropout: false
|
207 |
+
retain_dropout_modules: null
|
208 |
+
decoding_format: null
|
209 |
+
no_seed_provided: false
|
210 |
+
eos_token: null
|
211 |
+
eval_lm:
|
212 |
+
_name: null
|
213 |
+
output_word_probs: false
|
214 |
+
output_word_stats: false
|
215 |
+
context_window: 0
|
216 |
+
softmax_batch: 9223372036854775807
|
217 |
+
interactive:
|
218 |
+
_name: null
|
219 |
+
buffer_size: 0
|
220 |
+
input: '-'
|
221 |
+
model:
|
222 |
+
_name: hubert
|
223 |
+
label_rate: 50.0
|
224 |
+
extractor_mode: layer_norm
|
225 |
+
encoder_layers: 24
|
226 |
+
encoder_embed_dim: 1024
|
227 |
+
encoder_ffn_embed_dim: 4096
|
228 |
+
encoder_attention_heads: 16
|
229 |
+
activation_fn: gelu
|
230 |
+
layer_type: transformer
|
231 |
+
dropout: 0.0
|
232 |
+
attention_dropout: 0.0
|
233 |
+
activation_dropout: 0.0
|
234 |
+
encoder_layerdrop: 0.0
|
235 |
+
dropout_input: 0.0
|
236 |
+
dropout_features: 0.0
|
237 |
+
final_dim: 768
|
238 |
+
untie_final_proj: true
|
239 |
+
layer_norm_first: true
|
240 |
+
conv_feature_layers: '[(512,10,5)] + [(512,3,2)] * 4 + [(512,2,2)] * 2'
|
241 |
+
conv_bias: false
|
242 |
+
logit_temp: 0.1
|
243 |
+
target_glu: false
|
244 |
+
feature_grad_mult: 1.0
|
245 |
+
mask_length: 10
|
246 |
+
mask_prob: 0.8
|
247 |
+
mask_selection: static
|
248 |
+
mask_other: 0.0
|
249 |
+
no_mask_overlap: false
|
250 |
+
mask_min_space: 1
|
251 |
+
mask_channel_length: 10
|
252 |
+
mask_channel_prob: 0.0
|
253 |
+
mask_channel_selection: static
|
254 |
+
mask_channel_other: 0.0
|
255 |
+
no_mask_channel_overlap: false
|
256 |
+
mask_channel_min_space: 1
|
257 |
+
conv_pos: 128
|
258 |
+
conv_pos_groups: 16
|
259 |
+
latent_temp:
|
260 |
+
- 2.0
|
261 |
+
- 0.5
|
262 |
+
- 0.999995
|
263 |
+
skip_masked: false
|
264 |
+
skip_nomask: false
|
265 |
+
checkpoint_activations: false
|
266 |
+
required_seq_len_multiple: 2
|
267 |
+
depthwise_conv_kernel_size: 31
|
268 |
+
attn_type: ''
|
269 |
+
pos_enc_type: abs
|
270 |
+
fp16: false
|
271 |
+
task:
|
272 |
+
_name: hubert_pretraining
|
273 |
+
data: data
|
274 |
+
fine_tuning: false
|
275 |
+
labels:
|
276 |
+
- km
|
277 |
+
label_dir: config
|
278 |
+
label_rate: 50.0
|
279 |
+
sample_rate: 16000
|
280 |
+
normalize: true
|
281 |
+
enable_padding: false
|
282 |
+
max_keep_size: 320000
|
283 |
+
max_sample_size: 320000
|
284 |
+
min_sample_size: 16000
|
285 |
+
single_target: false
|
286 |
+
random_crop: true
|
287 |
+
pad_audio: false
|
288 |
+
criterion:
|
289 |
+
_name: hubert
|
290 |
+
pred_masked_weight: 1.0
|
291 |
+
pred_nomask_weight: 0.0
|
292 |
+
loss_weights:
|
293 |
+
- 10.0
|
294 |
+
log_keys: []
|
295 |
+
optimizer:
|
296 |
+
_name: adam
|
297 |
+
adam_betas: (0.9,0.98)
|
298 |
+
adam_eps: 1.0e-06
|
299 |
+
weight_decay: 0.01
|
300 |
+
use_old_adam: false
|
301 |
+
fp16_adam_stats: false
|
302 |
+
tpu: false
|
303 |
+
lr:
|
304 |
+
- 0.0015
|
305 |
+
lr_scheduler:
|
306 |
+
_name: polynomial_decay
|
307 |
+
warmup_updates: 32000
|
308 |
+
force_anneal: null
|
309 |
+
end_learning_rate: 0.0
|
310 |
+
power: 1.0
|
311 |
+
total_num_update: 700000.0
|
312 |
+
lr:
|
313 |
+
- 0.0015
|
314 |
+
scoring: null
|
315 |
+
bpe: null
|
316 |
+
tokenizer: null
|
317 |
+
ema:
|
318 |
+
_name: null
|
319 |
+
store_ema: false
|
320 |
+
ema_decay: 0.9999
|
321 |
+
ema_start_update: 0
|
322 |
+
ema_seed_model: null
|
323 |
+
ema_update_freq: 1
|
324 |
+
ema_fp32: false
|
325 |
+
job_logging_cfg:
|
326 |
+
version: 1
|
327 |
+
formatters:
|
328 |
+
simple:
|
329 |
+
format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
|
330 |
+
handlers:
|
331 |
+
console:
|
332 |
+
class: logging.StreamHandler
|
333 |
+
formatter: simple
|
334 |
+
stream: ext://sys.stdout
|
335 |
+
file:
|
336 |
+
class: logging.FileHandler
|
337 |
+
formatter: simple
|
338 |
+
filename: hydra_train.log
|
339 |
+
root:
|
340 |
+
level: INFO
|
341 |
+
handlers:
|
342 |
+
- console
|
343 |
+
- file
|
344 |
+
disable_existing_loggers: false
|
semantic_tokenizer/f40ms/fairseq_npu_patch.py
ADDED
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright (C) 2025. Huawei Technologies Co., Ltd. All Rights Reserved. (authors: Xiao Chen)
|
2 |
+
|
3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
4 |
+
# you may not use this file except in compliance with the License.
|
5 |
+
# You may obtain a copy of the License at
|
6 |
+
|
7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
8 |
+
|
9 |
+
# Unless required by applicable law or agreed to in writing, software
|
10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
+
# See the License for the specific language governing permissions and
|
13 |
+
# limitations under the License.
|
14 |
+
|
15 |
+
|
16 |
+
from patch_utils import MindSpeedPatchesManager as aspm
|
17 |
+
import torch_npu
|
18 |
+
|
19 |
+
def cnn_feature_forward_npu(self, x):
|
20 |
+
|
21 |
+
# BxT -> BxCxT
|
22 |
+
x = x.unsqueeze(1)
|
23 |
+
|
24 |
+
for conv in self.conv_layers:
|
25 |
+
# x = conv(x)
|
26 |
+
x_tmp = conv[:-1](x)
|
27 |
+
x = torch_npu.npu_gelu(x_tmp)
|
28 |
+
|
29 |
+
return x
|
30 |
+
|
31 |
+
def patch_for_npu():
|
32 |
+
# replace torch.cuda.get_device_capability with implementation from MindSpeed
|
33 |
+
aspm.register_patch('fairseq.models.wav2vec.wav2vec2.ConvFeatureExtractionModel.forward', cnn_feature_forward_npu)
|
34 |
+
aspm.apply_patches()
|
semantic_tokenizer/f40ms/infer_for_eval.py
ADDED
@@ -0,0 +1,140 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright (C) 2025. Huawei Technologies Co., Ltd. All Rights Reserved. (authors: Daxin Tan,
|
2 |
+
# Xiao Chen)
|
3 |
+
|
4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
5 |
+
# you may not use this file except in compliance with the License.
|
6 |
+
# You may obtain a copy of the License at
|
7 |
+
|
8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
9 |
+
|
10 |
+
# Unless required by applicable law or agreed to in writing, software
|
11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
13 |
+
# See the License for the specific language governing permissions and
|
14 |
+
# limitations under the License.
|
15 |
+
|
16 |
+
|
17 |
+
from simple_tokenizer_infer import SpeechTokenizer
|
18 |
+
import argparse
|
19 |
+
import librosa
|
20 |
+
import logging
|
21 |
+
from pathlib import Path
|
22 |
+
|
23 |
+
|
24 |
+
def main(args):
|
25 |
+
ref_wav_file_list = []
|
26 |
+
line_info_list = []
|
27 |
+
reconstruct_wav_file_list = []
|
28 |
+
|
29 |
+
logging.info(f"loading eval file list")
|
30 |
+
base_path = Path(args.input_list).parent
|
31 |
+
with open(args.input_list, "r") as input_file:
|
32 |
+
for line in input_file:
|
33 |
+
fields = line.strip().split("|")
|
34 |
+
if args.input_type == "tts":
|
35 |
+
ref_wav_file_list.append(fields[2])
|
36 |
+
else:
|
37 |
+
reconstruct_wav_file_list.append(fields[4])
|
38 |
+
ref_wav_file_list.append(fields[2])
|
39 |
+
line_info_list.append([fields[2], fields[0], fields[3]]) # ref wav path, gen wav id, text
|
40 |
+
|
41 |
+
logging.info(f"loading ref audio")
|
42 |
+
raw_ref_wavs_list = [] # 用librosa 加载后的raw wave 波形数据
|
43 |
+
for file_path in ref_wav_file_list:
|
44 |
+
# 加载波形数据
|
45 |
+
raw_wav, sr = librosa.load(
|
46 |
+
(base_path / file_path), sr=16000
|
47 |
+
) # sr=None 保留原始采样率
|
48 |
+
raw_ref_wavs_list.append(raw_wav)
|
49 |
+
|
50 |
+
logging.info(f"extracting token for ref audio")
|
51 |
+
if args.ckpt is not None:
|
52 |
+
tokenizer = SpeechTokenizer(
|
53 |
+
ckpt_path=args.ckpt, cfg_path=args.cfg_path, cfg_name=args.cfg_name
|
54 |
+
)
|
55 |
+
else:
|
56 |
+
tokenizer = SpeechTokenizer()
|
57 |
+
ref_token_list, ref_token_info_list = tokenizer.extract(raw_ref_wavs_list)
|
58 |
+
|
59 |
+
if args.input_type == "reconstruct":
|
60 |
+
logging.info(f"loading reconstruct audio")
|
61 |
+
raw_reconstruct_wav_list = [] # 用librosa 加载后的raw wave 波形数据
|
62 |
+
for file_path in reconstruct_wav_file_list:
|
63 |
+
# 加载波形数据
|
64 |
+
raw_wav, sr = librosa.load(
|
65 |
+
(base_path / file_path), sr=16000
|
66 |
+
) # sr=None 保留原始采样率
|
67 |
+
raw_reconstruct_wav_list.append(raw_wav)
|
68 |
+
|
69 |
+
logging.info(f"extracting token for reconstruct audio")
|
70 |
+
recon_token_list, recon_token_info_list = tokenizer.extract(raw_reconstruct_wav_list)
|
71 |
+
assert(len(ref_token_info_list) == len(recon_token_info_list))
|
72 |
+
|
73 |
+
assert(len(ref_token_info_list) == len(line_info_list))
|
74 |
+
with open(args.output_file, "w") as output_file:
|
75 |
+
logging.info(f"writing output file")
|
76 |
+
if args.input_type == "tts":
|
77 |
+
for ref, line_info in zip(ref_token_info_list, line_info_list):
|
78 |
+
ref_units = ref["reduced_unit_sequence"]
|
79 |
+
# logging.info(ref_units)
|
80 |
+
ref_path = str((base_path / line_info[0]))
|
81 |
+
output_file.write(f"{ref_path}|{ref_units}|{line_info[1]}|{line_info[2]}\n")
|
82 |
+
else:
|
83 |
+
for ref, recon, line_info in zip(ref_token_info_list, recon_token_info_list, line_info_list):
|
84 |
+
ref_units = ref["reduced_unit_sequence"]
|
85 |
+
recon_units = recon["reduced_unit_sequence"]
|
86 |
+
# logging.info(ref_units)
|
87 |
+
ref_path = str((base_path / line_info[0]))
|
88 |
+
output_file.write(f"{ref_path}|{ref_units}|{line_info[1]}|{recon_units}|{line_info[2]}\n")
|
89 |
+
output_file.close()
|
90 |
+
logging.info("Finished")
|
91 |
+
return
|
92 |
+
|
93 |
+
|
94 |
+
if __name__ == "__main__":
|
95 |
+
parser = argparse.ArgumentParser()
|
96 |
+
parser.add_argument(
|
97 |
+
"--ckpt",
|
98 |
+
dest="ckpt",
|
99 |
+
required=False,
|
100 |
+
help="path to ckpt",
|
101 |
+
)
|
102 |
+
parser.add_argument(
|
103 |
+
"--cfg-path",
|
104 |
+
dest="cfg_path",
|
105 |
+
required=False,
|
106 |
+
default="config",
|
107 |
+
help="path to config",
|
108 |
+
)
|
109 |
+
parser.add_argument(
|
110 |
+
"--cfg-name",
|
111 |
+
dest="cfg_name",
|
112 |
+
required=False,
|
113 |
+
default="hubert_config",
|
114 |
+
help="name of config",
|
115 |
+
)
|
116 |
+
parser.add_argument(
|
117 |
+
"--input-list",
|
118 |
+
dest="input_list",
|
119 |
+
required=True,
|
120 |
+
help="list of input wavform",
|
121 |
+
)
|
122 |
+
parser.add_argument(
|
123 |
+
"--output-file",
|
124 |
+
dest="output_file",
|
125 |
+
required=True,
|
126 |
+
help="file to output speech tokens",
|
127 |
+
)
|
128 |
+
parser.add_argument(
|
129 |
+
"--input-type",
|
130 |
+
default="tts",
|
131 |
+
type=str,
|
132 |
+
required=True,
|
133 |
+
help=f"test fil list type: tts or reconstruct, seedtts format",
|
134 |
+
)
|
135 |
+
args = parser.parse_args()
|
136 |
+
|
137 |
+
if args.input_type not in {"tts", "reconstruct"}:
|
138 |
+
logging.info(f"Input type must be tts or reconstruct")
|
139 |
+
exit()
|
140 |
+
main(args)
|
semantic_tokenizer/f40ms/models/__init__.py
ADDED
File without changes
|