nielsr (HF Staff) committed · verified
Commit 4e7eb0a · Parent(s): b7d7025

Update README.md

Files changed (1)
  1. README.md +2 -6
README.md CHANGED
@@ -68,9 +68,7 @@ sampling_params = SamplingParams(
     max_tokens=8192,
 )
 llm = LLM(model="Unbabel/Tower-Plus-72B", tensor_parallel_size=4)
-messages = [{"role": "user", "content": "Translate the following English source text to Portuguese (Portugal):
-English: Hello world!
-Portuguese (Portugal): "}]
+messages = [{"role": "user", "content": "Translate the following English source text to Portuguese (Portugal):\nEnglish: Hello world!\nPortuguese (Portugal): "}]
 outputs = llm.chat(messages, sampling_params)
 # Make sure your prompt_token_ids look like this
 print (outputs[0].outputs[0].text)
@@ -87,9 +85,7 @@ from transformers import pipeline
 
 pipe = pipeline("text-generation", model="Unbabel/Tower-Plus-72B", device_map="auto")
 # We use the tokenizer’s chat template to format each message - see https://huggingface.co/docs/transformers/main/en/chat_templating
-messages = [{"role": "user", "content": "Translate the following English source text to Portuguese (Portugal):
-English: Hello world!
-Portuguese (Portugal): "}]
+messages = [{"role": "user", "content": "Translate the following English source text to Portuguese (Portugal):\nEnglish: Hello world!\nPortuguese (Portugal): "}]
 input_ids = pipe.tokenizer.apply_chat_template(messages, tokenize=True, add_generation_prompt=True)
 outputs = pipe(messages, max_new_tokens=256, do_sample=False)
 print(outputs[0]["generated_text"])
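
The commit collapses the multi-line prompt strings into single-line strings with \n escapes, so both README snippets are valid, copy-pastable Python. Below is a minimal sketch, not part of the commit, for checking that the single-line prompt renders as intended through the model's chat template before launching the full 72B model; it assumes the Unbabel/Tower-Plus-72B tokenizer can be fetched from the Hub.

# Minimal sketch (not part of this commit): render the corrected single-line
# prompt through the chat template to inspect the exact text the model will see.
# Assumes the Unbabel/Tower-Plus-72B tokenizer can be downloaded from the Hub.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("Unbabel/Tower-Plus-72B")

messages = [{
    "role": "user",
    "content": (
        "Translate the following English source text to Portuguese (Portugal):\n"
        "English: Hello world!\n"
        "Portuguese (Portugal): "
    ),
}]

# Same call the transformers example uses; tokenize=False returns the formatted
# prompt string instead of token ids, which is easier to inspect by eye.
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)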