nouman66 committed
Commit f471c69 · 1 Parent(s): 8549dc7

Update app.py

Files changed (1): app.py (+18, −27)
app.py CHANGED
@@ -1,34 +1,25 @@
  import streamlit as st
- from transformers import pipeline

- def main():
-     st.title("Multilingual Translator")

-     # Get user input
-     input_text = st.text_area("Enter text to translate:")

-     # Select source and target languages
-     source_lang = st.selectbox("Select source language:", get_languages())
-     target_lang = st.selectbox("Select target language:", get_languages())

-     # Translate text
-     if st.button("Translate"):
-         if input_text:
-             translation = translate_text(input_text, source_lang, target_lang)
-             st.success(f"Translated text: {translation}")
-         else:
-             st.warning("Please enter text to translate.")

- def get_languages():
-     # You can customize this list based on the languages you want to support
-     return ["en", "es", "fr", "de", "it", "ja", "ko", "zh-CN"]
-
- def translate_text(text, source_lang, target_lang):
-     translator = pipeline(task="translation", model=f"{source_lang}-to-{target_lang}")
-     translation = translator(text, max_length=500)[0]['translation_text']
-     return translation
-
- if __name__ == "__main__":
-     main()
-     streamlit run app.py
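The removed version above could not run as committed: `streamlit run app.py` is a shell command, not Python, so the file fails to parse inside the `if __name__ == "__main__":` block, and `pipeline(task="translation", model=f"{source_lang}-to-{target_lang}")` builds a string such as "en-to-fr", which is not a model id on the Hugging Face Hub. A minimal sketch of a valid pipeline call, assuming the Helsinki-NLP/opus-mt checkpoints (not mentioned anywhere in this commit):

from transformers import pipeline

# Assumption: Helsinki-NLP/opus-mt-* models encode the language pair in the
# model id, e.g. "Helsinki-NLP/opus-mt-en-fr" for English -> French.
source_lang, target_lang = "en", "fr"
translator = pipeline("translation", model=f"Helsinki-NLP/opus-mt-{source_lang}-{target_lang}")
print(translator("Hello, world!", max_length=500)[0]["translation_text"])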
 
  import streamlit as st
+ from transformers import MBartForConditionalGeneration, MBart50TokenizerFast

+ # Load the multilingual translation model and tokenizer
+ model_name = "facebook/mbart-large-50"  # Choose a suitable model
+ tokenizer = MBart50TokenizerFast.from_pretrained(model_name)
+ model = MBartForConditionalGeneration.from_pretrained(model_name)

+ # Create the Streamlit app interface
+ st.title("Multilingual Translator")

+ source_text = st.text_area("Enter text to translate")
+ target_language = st.selectbox("Choose target language", tokenizer.lang_codes.keys())

+ if st.button("Translate"):
+     translated_text = translate_text(model, tokenizer, source_text, target_language)
+     st.write("Translated text:", translated_text)

+ # Define the translation function
+ def translate_text(model, tokenizer, source_text, target_language):
+     inputs = tokenizer(source_text, return_tensors="pt")
+     outputs = model.generate(**inputs, forced_bos_token_id=tokenizer.lang_code_to_id[target_language])
+     translated_text = tokenizer.batch_decode(outputs, skip_special_tokens=True)[0]
+     return translated_text
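As committed, the new version still has a few issues: Streamlit runs the script top to bottom, so the button handler calls `translate_text` before its `def` at the bottom has executed and raises a NameError; `MBart50TokenizerFast` has no `lang_codes` attribute (the code-to-id mapping is `lang_code_to_id`); the source language is never set via `tokenizer.src_lang`; the base `facebook/mbart-large-50` checkpoint is not fine-tuned for translation; and the model is reloaded on every rerun. A sketch of one way to address these, assuming the `facebook/mbart-large-50-many-to-many-mmt` checkpoint and a Streamlit version that provides `st.cache_resource` (neither appears in this commit):

import streamlit as st
from transformers import MBartForConditionalGeneration, MBart50TokenizerFast

# Assumption: the many-to-many fine-tuned checkpoint rather than the base model.
MODEL_NAME = "facebook/mbart-large-50-many-to-many-mmt"

@st.cache_resource  # load the model and tokenizer once, not on every Streamlit rerun
def load_model():
    tokenizer = MBart50TokenizerFast.from_pretrained(MODEL_NAME)
    model = MBartForConditionalGeneration.from_pretrained(MODEL_NAME)
    return tokenizer, model

def translate_text(model, tokenizer, source_text, source_language, target_language):
    tokenizer.src_lang = source_language  # mBART-50 needs the source language set before encoding
    inputs = tokenizer(source_text, return_tensors="pt")
    # Force the decoder to start with the target-language token
    outputs = model.generate(**inputs, forced_bos_token_id=tokenizer.lang_code_to_id[target_language])
    return tokenizer.batch_decode(outputs, skip_special_tokens=True)[0]

tokenizer, model = load_model()

st.title("Multilingual Translator")
source_text = st.text_area("Enter text to translate")

# mBART-50 language codes look like "en_XX", "fr_XX", "zh_CN"
lang_codes = list(tokenizer.lang_code_to_id.keys())
source_language = st.selectbox("Choose source language", lang_codes, index=lang_codes.index("en_XX"))
target_language = st.selectbox("Choose target language", lang_codes)

if st.button("Translate"):
    if source_text:
        st.write("Translated text:", translate_text(model, tokenizer, source_text, source_language, target_language))
    else:
        st.warning("Please enter text to translate.")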