Update mlc-chat-config.json
mlc-chat-config.json  (+30 -2)
@@ -28,9 +28,37 @@
   "temperature": 0.7,
   "repetition_penalty": 1.1,
   "top_p": 0.8,
-  "conv_template":
+  "conv_template": {
+    "name": "chatml",
+    "system_template": "<|im_start|>system\n{system_message}",
+    "system_message": "A conversation between a user and an LLM-based AI assistant. The assistant gives helpful and honest answers.",
+    "add_role_after_system_message": true,
+    "roles": {
+      "user": "<|im_start|>user",
+      "assistant": "<|im_start|>assistant"
+    },
+    "role_templates": {
+      "user": "{user_message}",
+      "assistant": "{assistant_message}",
+      "tool": "{tool_message}"
+    },
+    "messages": [],
+    "seps": [
+      "<|im_end|>\n"
+    ],
+    "role_content_sep": "\n",
+    "role_empty_sep": "\n",
+    "stop_str": [
+      "<|im_end|>"
+    ],
+    "stop_token_ids": [
+      151643
+    ],
+    "function_string": "",
+    "use_function_calling": false
+  },
   "pad_token_id": 151643,
-  "bos_token_id":
+  "bos_token_id": 151643,
   "eos_token_id": 151643,
   "tokenizer_files": [
     "tokenizer.json",