adeo committed
Commit ff52835 · verified · 1 Parent(s): 6c757f1

Model save

README.md CHANGED
@@ -27,7 +27,7 @@ print(output["generated_text"])
 
 ## Training procedure
 
-[<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://wandb.ai/bootpin/huggingface/runs/lg4lg76q)
+[<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://wandb.ai/bootpin/huggingface/runs/q94wllqj)
 
 This model was trained with SFT.
 
all_results.json CHANGED
@@ -1,9 +1,9 @@
 {
-    "epoch": 1.7142857142857144,
-    "total_flos": 1113549778714624.0,
-    "train_loss": 19.26901610692342,
-    "train_runtime": 77.567,
-    "train_samples": 460341,
-    "train_samples_per_second": 1.444,
-    "train_steps_per_second": 0.077
+    "epoch": 1.9995261786306564,
+    "total_flos": 1.174779724933628e+18,
+    "train_loss": 1.0698493373337514,
+    "train_runtime": 77836.8266,
+    "train_samples": 460142,
+    "train_samples_per_second": 1.301,
+    "train_steps_per_second": 0.081
 }
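
The corrected numbers also pass a basic consistency check; the sketch below (illustrative only, not part of the commit) derives the sustained throughput from the two values above.

```python
# Values copied from the updated all_results.json above.
total_flos = 1.174779724933628e+18
train_runtime = 77836.8266  # seconds, i.e. ~21.6 hours

# Sustained compute throughput over the run: ~1.51e13 FLOP/s (~15 TFLOP/s).
print(f"{total_flos / train_runtime:.3e} FLOP/s")

# By contrast, the old file recorded a 77-second run with train_loss 19.27,
# which looks like a smoke test rather than the full ~2-epoch run.
```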
config.json CHANGED
@@ -5,8 +5,8 @@
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
-  "bos_token_id": 0,
-  "eos_token_id": 0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
   "head_dim": 64,
   "hidden_act": "silu",
   "hidden_size": 960,
@@ -19,6 +19,7 @@
   "num_attention_heads": 15,
   "num_hidden_layers": 32,
   "num_key_value_heads": 5,
+  "pad_token_id": 2,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
   "rope_interleaved": false,
generation_config.json CHANGED
@@ -1,6 +1,7 @@
 {
   "_from_model_config": true,
-  "bos_token_id": 0,
-  "eos_token_id": 0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "pad_token_id": 2,
   "transformers_version": "4.46.3"
 }
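
After this change, config.json and generation_config.json both declare bos=1, eos=2, pad=2. A minimal sketch for checking that the two files stay in sync in a local checkout (file paths are assumptions):

```python
import json

# Paths assume a local clone of this repo.
cfg = json.load(open("config.json"))
gen = json.load(open("generation_config.json"))

# Both files should now agree on the special token ids (bos=1, eos=2, pad=2).
for key in ("bos_token_id", "eos_token_id", "pad_token_id"):
    assert cfg[key] == gen[key], f"{key}: {cfg[key]} != {gen[key]}"

print({key: cfg[key] for key in ("bos_token_id", "eos_token_id", "pad_token_id")})
```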
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:aa4a3d79f65de914ec12916186e80c8c6aa99a03a6d8075c215a713c8705cf07
+oid sha256:05abbb502b69a004c24f32e108475eeeb2694852026cd58bdcfe3319c9665216
 size 723674912
runs/Apr03_23-32-22_afc374fd6ab1/events.out.tfevents.1743723195.afc374fd6ab1.20983.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0791cd12441ccb2769383cd599bc8ca762779a44c2e9fc379e9b29e17e0f6b3c
+size 273720
special_tokens_map.json CHANGED
@@ -1,29 +1,23 @@
 {
   "additional_special_tokens": [
-    "<|im_start|>",
-    "<|im_end|>"
+    {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    }
   ],
-  "bos_token": {
-    "content": "<|im_start|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "<|im_end|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": {
-    "content": "<|im_end|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
+  "bos_token": "<|im_start|>",
+  "eos_token": "<|im_end|>",
+  "pad_token": "<|im_end|>",
   "unk_token": {
     "content": "<|endoftext|>",
     "lstrip": false,
tokenizer_config.json CHANGED
@@ -143,7 +143,7 @@
     "<|im_end|>"
   ],
   "bos_token": "<|im_start|>",
-  "chat_template": "",
+  "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|im_end|>",
   "model_max_length": 8192,
train_results.json CHANGED
@@ -1,9 +1,9 @@
 {
-    "epoch": 1.7142857142857144,
-    "total_flos": 1113549778714624.0,
-    "train_loss": 19.26901610692342,
-    "train_runtime": 77.567,
-    "train_samples": 460341,
-    "train_samples_per_second": 1.444,
-    "train_steps_per_second": 0.077
+    "epoch": 1.9995261786306564,
+    "total_flos": 1.174779724933628e+18,
+    "train_loss": 1.0698493373337514,
+    "train_runtime": 77836.8266,
+    "train_samples": 460142,
+    "train_samples_per_second": 1.301,
+    "train_steps_per_second": 0.081
 }
trainer_state.json CHANGED
The diff for this file is too large to render. See raw diff
 
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:faa19ffaf1e2ddc9132de277ead37eb9ac3104cc1baaf3c94cbde2c2fa8781b7
-size 7160
+oid sha256:d9807ad48581fd1f16b2eb8e994fec16526cdca9d9f278e4cbce52b1e8fd15cf
+size 6904