Spaces:
Sleeping
Sleeping
Update src/translate/Translate.py
Browse files- src/translate/Translate.py +10 -1
src/translate/Translate.py
CHANGED
@@ -2,6 +2,8 @@ from nltk.tokenize import sent_tokenize
|
|
2 |
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
|
3 |
import torch
|
4 |
import src.exception.Exception.Exception as ExceptionCustom
|
|
|
|
|
5 |
|
6 |
METHOD = "TRANSLATE"
|
7 |
|
@@ -49,4 +51,11 @@ def paraphraseTranslateMethod(requestValue: str, model: str):
|
|
49 |
result = tokenizerENGROM.batch_decode(output, skip_special_tokens=True)[0]
|
50 |
result_value.append(result)
|
51 |
|
52 |
-
return " ".join(result_value).strip(), model
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2 |
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
|
3 |
import torch
|
4 |
import src.exception.Exception.Exception as ExceptionCustom
|
5 |
+
# Use a pipeline as a high-level helper
|
6 |
+
from transformers import pipeline
|
7 |
|
8 |
METHOD = "TRANSLATE"
|
9 |
|
|
|
51 |
result = tokenizerENGROM.batch_decode(output, skip_special_tokens=True)[0]
|
52 |
result_value.append(result)
|
53 |
|
54 |
+
return " ".join(result_value).strip(), model
|
55 |
+
|
def gemma(requestValue: str, model: str = 'Gargaz/gemma-2b-romanian-better'):
    """Translate *requestValue* to Romanian via a chat-style text-generation model.

    Parameters
    ----------
    requestValue : str
        The text to translate.
    model : str
        Hugging Face model id to load. Defaults to the Romanian Gemma
        fine-tune ``Gargaz/gemma-2b-romanian-better``.

    Returns
    -------
    The raw output of ``transformers.pipeline`` for the chat messages
    (a list of generated-conversation dicts).
    """
    # Fix: the original ignored the ``model`` argument and always loaded the
    # hard-coded checkpoint; honor the caller's choice. The default keeps
    # existing call sites behaving exactly as before.
    pipe = pipeline("text-generation", model=model)
    messages = [
        {"role": "user", "content": f"Translate the following text to Romanian using a formal tone and provide only translation: {requestValue}"},
    ]
    return pipe(messages)