Commit 9a0caf3
Parent(s): 834f6fb

perf: optimize CPU with optimum

Files changed:
- app.py +3 -0
- requirements.txt +1 -0
app.py CHANGED

@@ -5,6 +5,7 @@ from indobenchmark import IndoNLGTokenizer
 gpt_tokenizer = IndoNLGTokenizer.from_pretrained("indobenchmark/indogpt")
 gpt_tokenizer.pad_token = gpt_tokenizer.eos_token
 kancilgpt = GPT2LMHeadModel.from_pretrained("abdiharyadi/kancilgpt")
+kancilgpt.to_bettertransformer()
 
 def generate_story():
     stop = False

@@ -87,6 +88,8 @@ def generate_story():
     total_isi = isi
 
     print("We skip the rest of the part for debug.")
+
+    # TODO: Solve this.
     # ellipsis = "..."
     # while not end_part.startswith("tamat"):
     #     yield judul + "\n" + ("-" * len(judul)) + "\n" + total_isi + f" {ellipsis}"
requirements.txt CHANGED

@@ -2,3 +2,4 @@ transformers
 sentencepiece
 datasets
 torch
+optimum
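
For context, the single functional change is the kancilgpt.to_bettertransformer() call, which converts the GPT-2 checkpoint to the BetterTransformer fastpath provided by optimum (hence the new requirements.txt entry). Below is a minimal sketch of the resulting setup; the prompt text and generation arguments are illustrative assumptions, not taken from this commit.

# Minimal sketch of the setup after this commit, assuming CPU-only inference.
# Model and tokenizer identifiers come from app.py; the prompt and generation
# arguments below are illustrative placeholders, not part of the Space's code.
from transformers import GPT2LMHeadModel
from indobenchmark import IndoNLGTokenizer

gpt_tokenizer = IndoNLGTokenizer.from_pretrained("indobenchmark/indogpt")
gpt_tokenizer.pad_token = gpt_tokenizer.eos_token

kancilgpt = GPT2LMHeadModel.from_pretrained("abdiharyadi/kancilgpt")
# Convert to the BetterTransformer fastpath; this call requires optimum,
# which is why it was added to requirements.txt.
kancilgpt.to_bettertransformer()

inputs = gpt_tokenizer("sebuah cerita", return_tensors="pt")  # placeholder prompt
output_ids = kancilgpt.generate(
    **inputs,
    max_new_tokens=20,
    pad_token_id=gpt_tokenizer.pad_token_id,
)
print(gpt_tokenizer.decode(output_ids[0], skip_special_tokens=True))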