Commit: "add space"
Browse files — tokenizer_config.json changed (+4 −7)
@@ -33,18 +33,15 @@
33      "</s>"
34    ],
35    "bos_token": "<s>",
36 -  "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token + ' ' }}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
37    "clean_up_tokenization_spaces": false,
38    "eos_token": "</s>",
39    "legacy": true,
40    "model_max_length": 1000000000000000019884624838656,
41 -  "pad_token":
42    "sp_model_kwargs": {},
43    "spaces_between_special_tokens": false,
44    "tokenizer_class": "LlamaTokenizer",
45 -  "tokenizer_file": "/root/.cache/huggingface/hub/models--mistralai--Mistral-7B-Instruct-v0.1/snapshots/54766df6d50e4d3d7ccd66758e5341ba105a6d36/tokenizer.json",
46 -  "trust_remote_code": false,
47    "unk_token": "<unk>",
48 -  "use_default_system_prompt": true
49 -
50 -  }
33      "</s>"
34    ],
35    "bos_token": "<s>",
36 +  "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST] ' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token + ' ' }}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
37    "clean_up_tokenization_spaces": false,
38    "eos_token": "</s>",
39    "legacy": true,
40    "model_max_length": 1000000000000000019884624838656,
41 +  "pad_token": null,
42    "sp_model_kwargs": {},
43    "spaces_between_special_tokens": false,
44    "tokenizer_class": "LlamaTokenizer",
45    "unk_token": "<unk>",
46 +  "use_default_system_prompt": true
47 +  }