Spaces: Running on Zero

Commit: pre-commit

Files changed:
- app.py (+120 -98)
- en.txt (+1 -1)
- frankenstein5k.md (+1 -1)
- gatsby5k.md (+1 -1)
- packages.txt (+1 -1)
app.py CHANGED (resulting file):

import os
import random

import gradio as gr
import spaces
import torch
from kokoro import KModel, KPipeline

IS_DUPLICATE = not os.getenv("SPACE_ID", "").startswith("hexgrad/")
CUDA_AVAILABLE = torch.cuda.is_available()
if not IS_DUPLICATE:
    import kokoro
    import misaki

    print("DEBUG", kokoro.__version__, CUDA_AVAILABLE, misaki.__version__)  # noqa: T201

CHAR_LIMIT = None if IS_DUPLICATE else 5000
models = {gpu: KModel().to("cuda" if gpu else "cpu").eval() for gpu in [False] + ([True] if CUDA_AVAILABLE else [])}
pipelines = {lang_code: KPipeline(lang_code=lang_code, model=False) for lang_code in "ab"}
pipelines["a"].g2p.lexicon.golds["kokoro"] = "kˈOkəɹO"  # noqa: RUF001
pipelines["b"].g2p.lexicon.golds["kokoro"] = "kˈQkəɹQ"  # noqa: RUF001


@spaces.GPU(duration=30)
def forward_gpu(ps, ref_s, speed):  # noqa: ANN001, ANN201
    return models[True](ps, ref_s, speed)


def generate_first(text, voice="af_heart", speed=1, use_gpu=CUDA_AVAILABLE):  # noqa: ANN001, ANN201
    text = text if CHAR_LIMIT is None else text.strip()[:CHAR_LIMIT]
    pipeline = pipelines[voice[0]]
    pack = pipeline.load_voice(voice)
    use_gpu = use_gpu and CUDA_AVAILABLE
    for _, ps, _ in pipeline(text, voice, speed):
        ref_s = pack[len(ps) - 1]
        try:
            audio = forward_gpu(ps, ref_s, speed) if use_gpu else models[False](ps, ref_s, speed)
        except gr.exceptions.Error as e:
            if use_gpu:
                gr.Warning(str(e))
                gr.Info("Retrying with CPU. To avoid this error, change Hardware to CPU.")
                audio = models[False](ps, ref_s, speed)
            else:
                raise gr.Error(e)  # noqa: B904
        return (24000, audio.numpy()), ps
    return None, ""


# Arena API
def predict(text, voice="af_heart", speed=1):  # noqa: ANN001, ANN201
    return generate_first(text, voice, speed, use_gpu=False)[0]


def tokenize_first(text, voice="af_heart"):  # noqa: ANN001, ANN201
    pipeline = pipelines[voice[0]]
    for _, ps, _ in pipeline(text, voice):
        return ps
    return ""


def generate_all(text, voice="af_heart", speed=1, use_gpu=CUDA_AVAILABLE):  # noqa: ANN001, ANN201
    text = text if CHAR_LIMIT is None else text.strip()[:CHAR_LIMIT]
    pipeline = pipelines[voice[0]]
    pack = pipeline.load_voice(voice)
    use_gpu = use_gpu and CUDA_AVAILABLE
    first = True
    for _, ps, _ in pipeline(text, voice, speed):
        ref_s = pack[len(ps) - 1]
        try:
            audio = forward_gpu(ps, ref_s, speed) if use_gpu else models[False](ps, ref_s, speed)
        except gr.exceptions.Error as e:
            if use_gpu:
                gr.Warning(str(e))
                gr.Info("Switching to CPU")
                audio = models[False](ps, ref_s, speed)
            else:
                raise gr.Error(e)  # noqa: B904
        yield 24000, audio.numpy()
        if first:
            first = False
            yield 24000, torch.zeros(1).numpy()


with open("en.txt") as r:  # noqa: PTH123
    random_quotes = [line.strip() for line in r]


def get_random_quote():  # noqa: ANN201
    return random.choice(random_quotes)  # noqa: S311


def get_gatsby():  # noqa: ANN201
    with open("gatsby5k.md") as r:  # noqa: PTH123
        return r.read().strip()


def get_frankenstein():  # noqa: ANN201
    with open("frankenstein5k.md") as r:  # noqa: PTH123
        return r.read().strip()


CHOICES = {
    "🇺🇸 🚺 Heart ❤️": "af_heart",
    "🇺🇸 🚺 Bella 🔥": "af_bella",
    "🇺🇸 🚺 Nicole 🎧": "af_nicole",
    "🇺🇸 🚺 Aoede": "af_aoede",
    "🇺🇸 🚺 Kore": "af_kore",
    "🇺🇸 🚺 Sarah": "af_sarah",
    "🇺🇸 🚺 Nova": "af_nova",
    "🇺🇸 🚺 Sky": "af_sky",
    "🇺🇸 🚺 Alloy": "af_alloy",
    "🇺🇸 🚺 Jessica": "af_jessica",
    "🇺🇸 🚺 River": "af_river",
    "🇺🇸 🚹 Michael": "am_michael",
    "🇺🇸 🚹 Fenrir": "am_fenrir",
    "🇺🇸 🚹 Puck": "am_puck",
    "🇺🇸 🚹 Echo": "am_echo",
    "🇺🇸 🚹 Eric": "am_eric",
    "🇺🇸 🚹 Liam": "am_liam",
    "🇺🇸 🚹 Onyx": "am_onyx",
    "🇺🇸 🚹 Santa": "am_santa",
    "🇺🇸 🚹 Adam": "am_adam",
    "🇬🇧 🚺 Emma": "bf_emma",
    "🇬🇧 🚺 Isabella": "bf_isabella",
    "🇬🇧 🚺 Alice": "bf_alice",
    "🇬🇧 🚺 Lily": "bf_lily",
    "🇬🇧 🚹 George": "bm_george",
    "🇬🇧 🚹 Fable": "bm_fable",
    "🇬🇧 🚹 Lewis": "bm_lewis",
    "🇬🇧 🚹 Daniel": "bm_daniel",
}
for v in CHOICES.values():
    pipelines[v[0]].load_voice(v)

TOKEN_NOTE = """
💡 Customize pronunciation with Markdown link syntax and /slashes/ like `[Kokoro](/kˈOkəɹO/)`

💬 To adjust intonation, try punctuation `;:,.!?—…"()“”` or stress `ˈ` and `ˌ`

⬇️ Lower stress `[1 level](-1)` or `[2 levels](-2)`

⬆️ Raise stress 1 level `[or](+2)` 2 levels (only works on less stressed, usually short words)
"""  # noqa: S105, RUF001

with gr.Blocks() as generate_tab:
    out_audio = gr.Audio(label="Output Audio", interactive=False, streaming=False, autoplay=True)
    generate_btn = gr.Button("Generate", variant="primary")
    with gr.Accordion("Output Tokens", open=True):
        out_ps = gr.Textbox(
            interactive=False, show_label=False, info="Tokens used to generate the audio, up to 510 context length."
        )
        tokenize_btn = gr.Button("Tokenize", variant="secondary")
        gr.Markdown(TOKEN_NOTE)
        predict_btn = gr.Button("Predict", variant="secondary", visible=False)

STREAM_NOTE_LIST = ["⚠️ There is an unknown Gradio bug that might yield no audio the first time you click `Stream`."]
if CHAR_LIMIT is not None:
    STREAM_NOTE_LIST.append(f"✂️ Each stream is capped at {CHAR_LIMIT} characters.")
    STREAM_NOTE_LIST.append(
        "🚀 Want more characters? You can [use Kokoro directly](https://huggingface.co/hexgrad/Kokoro-82M#usage) or duplicate this space:"
    )
STREAM_NOTE = "\n\n".join(STREAM_NOTE_LIST)

with gr.Blocks() as stream_tab:
    out_stream = gr.Audio(label="Output Audio Stream", interactive=False, streaming=True, autoplay=True)
    with gr.Row():
        stream_btn = gr.Button("Stream", variant="primary")
        stop_btn = gr.Button("Stop", variant="stop")
    with gr.Accordion("Note", open=True):
        gr.Markdown(STREAM_NOTE)
        gr.DuplicateButton()

BANNER_TEXT = """
[***Kokoro*** **is an open-weight TTS model with 82 million parameters.**](https://huggingface.co/hexgrad/Kokoro-82M)

This demo only showcases English, but you can directly use the model to access other languages.
"""
API_OPEN = os.getenv("SPACE_ID") != "hexgrad/Kokoro-TTS"
API_NAME = None if API_OPEN else False
with gr.Blocks() as app:
    with gr.Row():
        gr.Markdown(BANNER_TEXT, container=True)
    with gr.Row():
        with gr.Column():
            text = gr.Textbox(
                label="Input Text",
                info=f"Up to ~500 characters per Generate, or {'∞' if CHAR_LIMIT is None else CHAR_LIMIT} characters per Stream",
            )
            with gr.Row():
                voice = gr.Dropdown(
                    list(CHOICES.items()),
                    value="af_heart",
                    label="Voice",
                    info="Quality and availability vary by language",
                )
                use_gpu = gr.Dropdown(
                    [("ZeroGPU 🚀", True), ("CPU 🐌", False)],
                    value=CUDA_AVAILABLE,
                    label="Hardware",
                    info="GPU is usually faster, but has a usage quota",
                    interactive=CUDA_AVAILABLE,
                )
            speed = gr.Slider(minimum=0.5, maximum=2, value=1, step=0.1, label="Speed")
            random_btn = gr.Button("🎲 Random Quote 💬", variant="secondary")
            with gr.Row():
                gatsby_btn = gr.Button("🥂 Gatsby 📕", variant="secondary")
                frankenstein_btn = gr.Button("💀 Frankenstein 📗", variant="secondary")
        with gr.Column():
            gr.TabbedInterface([generate_tab, stream_tab], ["Generate", "Stream"])
    random_btn.click(fn=get_random_quote, inputs=[], outputs=[text], api_name=API_NAME)
    gatsby_btn.click(fn=get_gatsby, inputs=[], outputs=[text], api_name=API_NAME)
    frankenstein_btn.click(fn=get_frankenstein, inputs=[], outputs=[text], api_name=API_NAME)
    generate_btn.click(
        fn=generate_first, inputs=[text, voice, speed, use_gpu], outputs=[out_audio, out_ps], api_name=API_NAME
    )
    tokenize_btn.click(fn=tokenize_first, inputs=[text, voice], outputs=[out_ps], api_name=API_NAME)
    stream_event = stream_btn.click(
        fn=generate_all, inputs=[text, voice, speed, use_gpu], outputs=[out_stream], api_name=API_NAME
    )
    stop_btn.click(fn=None, cancels=stream_event)
    predict_btn.click(fn=predict, inputs=[text, voice, speed], outputs=[out_audio], api_name=API_NAME)

if __name__ == "__main__":
    app.queue(api_open=API_OPEN).launch(show_api=API_OPEN, ssr_mode=True)
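For reference, app.py above supports two usage paths beyond the web UI. First, its STREAM_NOTE points users at running Kokoro directly for unlimited characters; a minimal sketch of that path, following the usage pattern on the linked Kokoro-82M model card (the `soundfile` dependency and output filenames are illustrative, not part of this Space):

# Direct Kokoro usage (sketch), mirroring the pattern the app itself relies on:
# pipeline(text, voice, speed) yields (graphemes, phonemes, audio) tuples.
import soundfile as sf
from kokoro import KPipeline

pipeline = KPipeline(lang_code="a")  # "a" American English, "b" British, as in the app's pipelines dict
generator = pipeline("Kokoro is an open-weight TTS model.", voice="af_heart", speed=1)
for i, (gs, ps, audio) in enumerate(generator):
    sf.write(f"{i}.wav", audio, 24000)  # 24 kHz, the same sample rate the app returns

Second, since `predict` is wired up as the Arena API and API_OPEN is True on duplicated Spaces, a duplicate should be remotely callable with `gradio_client`. A hypothetical sketch: the repo id is a placeholder, and the "/predict" endpoint name assumes Gradio's default function-name convention:

from gradio_client import Client

client = Client("your-username/Kokoro-TTS")  # placeholder: a duplicate of this Space
wav_path = client.predict(
    "Hello from the Arena API.",  # text
    "af_heart",                   # voice
    1,                            # speed
    api_name="/predict",
)
print(wav_path)  # gradio_client downloads the Audio output and returns a local file path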
en.txt CHANGED
@@ -2120,4 +2120,4 @@ Your sacred space is where you can find yourself again and again.
 Your talent is God's gift to you. What you do with it is your gift back to God.
 Your vision will become clear only when you can look into your own heart. Who looks outside, dreams, who looks inside, awakes.
 Your vision will become clear only when you look into your heart. Who looks outside, dreams. Who looks inside, awakens.
-Your worst enemy cannot harm you as much as your own unguarded thoughts.
+Your worst enemy cannot harm you as much as your own unguarded thoughts.
frankenstein5k.md CHANGED
@@ -8,4 +8,4 @@ These visions faded when I perused, for the first time, those poets whose effusi
 
 Six years have passed since I resolved on my present undertaking. I can, even now, remember the hour from which I dedicated myself to this great enterprise. I commenced by inuring my body to hardship. I accompanied the whale-fishers on several expeditions to the North Sea; I voluntarily endured cold, famine, thirst, and want of sleep; I often worked harder than the common sailors during the day and devoted my nights to the study of mathematics, the theory of medicine, and those branches of physical science from which a naval adventurer might derive the greatest practical advantage. Twice I actually hired myself as an under-mate in a Greenland whaler, and acquitted myself to admiration. I must own I felt a little proud when my captain offered me the second dignity in the vessel and entreated me to remain with the greatest earnestness, so valuable did he consider my services.
 
-And now, dear Margaret, do I not deserve to accomplish some great purpose?
+And now, dear Margaret, do I not deserve to accomplish some great purpose?
gatsby5k.md CHANGED
@@ -14,4 +14,4 @@ The practical thing was to find rooms in the city, but it was a warm season, and
 
 It was lonely for a day or so until one morning some man, more recently arrived than I, stopped me on the road.
 
-“How do you get to West Egg village?” he asked helplessly.
+“How do you get to West Egg village?” he asked helplessly.
packages.txt CHANGED
@@ -1 +1 @@
-espeak-ng
+espeak-ng