Allen Poston committed
Commit d50a389 · verified · 1 parent: bde19e6

Training update v1.2 - 3/13 datasets completed

Files changed (3)
  1. special_tokens_map.json +0 -30
  2. tokenizer.json +1 -46
  3. tokenizer_config.json +0 -38
special_tokens_map.json CHANGED
@@ -1,34 +1,4 @@
 {
-  "additional_special_tokens": [
-    {
-      "content": "<|human|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "<|assistant|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "<|system|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "<|context|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    }
-  ],
   "bos_token": {
     "content": "<|endoftext|>",
     "lstrip": false,
tokenizer.json CHANGED
@@ -6,16 +6,7 @@
     "strategy": "LongestFirst",
     "stride": 0
   },
-  "padding": {
-    "strategy": {
-      "Fixed": 256
-    },
-    "direction": "Right",
-    "pad_to_multiple_of": null,
-    "pad_id": 50256,
-    "pad_type_id": 0,
-    "pad_token": "<|endoftext|>"
-  },
+  "padding": null,
   "added_tokens": [
     {
       "id": 50256,
@@ -25,42 +16,6 @@
       "rstrip": false,
       "normalized": true,
       "special": true
-    },
-    {
-      "id": 50257,
-      "content": "<|human|>",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
-    },
-    {
-      "id": 50258,
-      "content": "<|assistant|>",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
-    },
-    {
-      "id": 50259,
-      "content": "<|system|>",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
-    },
-    {
-      "id": 50260,
-      "content": "<|context|>",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
     }
   ],
   "normalizer": null,
tokenizer_config.json CHANGED
@@ -9,46 +9,8 @@
       "rstrip": false,
       "single_word": false,
       "special": true
-    },
-    "50257": {
-      "content": "<|human|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "50258": {
-      "content": "<|assistant|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "50259": {
-      "content": "<|system|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "50260": {
-      "content": "<|context|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
     }
   },
-  "additional_special_tokens": [
-    "<|human|>",
-    "<|assistant|>",
-    "<|system|>",
-    "<|context|>"
-  ],
   "bos_token": "<|endoftext|>",
   "chat_template": "{% for message in messages %}{{ message.content }}{{ eos_token }}{% endfor %}",
   "clean_up_tokenization_spaces": true,