Fixed model name typo in README.md
In the Hugging Face Transformers example, the model is referred to as 'Salesforce/Llama-xLAM-2-3b-fc-r' but should read 'Salesforce/xLAM-2-3b-fc-r', as correctly listed in the 'Use this model' dropdown.
README.md
CHANGED
@@ -97,8 +97,8 @@ The new xLAM models are designed to work seamlessly with the Hugging Face Transf
 import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
-tokenizer = AutoTokenizer.from_pretrained("Salesforce/Llama-xLAM-2-3b-fc-r")
-model = AutoModelForCausalLM.from_pretrained("Salesforce/Llama-xLAM-2-3b-fc-r", torch_dtype=torch.bfloat16, device_map="auto")
+tokenizer = AutoTokenizer.from_pretrained("Salesforce/xLAM-2-3b-fc-r")
+model = AutoModelForCausalLM.from_pretrained("Salesforce/xLAM-2-3b-fc-r", torch_dtype=torch.bfloat16, device_map="auto")
 
 # Example conversation with a tool call
 messages = [
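For quick verification of the fix, a minimal sketch of loading the model under the corrected ID and running a tool-call prompt is below. It assumes a recent Transformers version with tool support in apply_chat_template; the example tool definition and messages are illustrative, not taken from the README.

```python
# Minimal sketch: check that the corrected model ID loads and handles a tool-call prompt.
# The tool schema and messages below are illustrative, not from the README itself.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "Salesforce/xLAM-2-3b-fc-r"  # corrected ID from this PR
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch.bfloat16, device_map="auto"
)

messages = [
    {"role": "user", "content": "What's the weather in San Francisco?"},
]
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Get the current weather for a city",
            "parameters": {
                "type": "object",
                "properties": {"city": {"type": "string"}},
                "required": ["city"],
            },
        },
    }
]

# Recent Transformers versions accept `tools=` when rendering the chat template.
inputs = tokenizer.apply_chat_template(
    messages, tools=tools, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

with torch.no_grad():
    output = model.generate(inputs, max_new_tokens=256)

# Print only the newly generated tokens (the model's tool call or reply).
print(tokenizer.decode(output[0][inputs.shape[-1]:], skip_special_tokens=True))
```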