Update README.md
Browse files
README.md
CHANGED
@@ -17,8 +17,8 @@ The model was trained with the Olmo2 7B architecture and pretraining data. It ha
 ```
 from transformers import AutoTokenizer, AutoModelForCausalLM

-tokenizer = AutoTokenizer.from_pretrained("
-model = AutoModelForCausalLM.from_pretrained("
+tokenizer = AutoTokenizer.from_pretrained("UW/OLMo2-8B-SuperBPE-t160k")
+model = AutoModelForCausalLM.from_pretrained("UW/OLMo2-8B-SuperBPE-t160k")

 tokenizer.convert_ids_to_tokens(tokenizer.encode("By the way, I am a fan of the Milky Way."))
 # ['ByĠtheĠway', ',ĠIĠamĠa', 'ĠfanĠofĠthe', 'ĠMilkyĠWay', '.']

(NOTE(review): the two removed lines above were truncated mid-string by the page extraction — the original repo id they referenced is not recoverable from this view; verify against the commit history.)